Merge branch 'trunk' into HDFS-7240
diff --git a/LICENSE.txt b/LICENSE.txt
index 447c609..75c5562 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -693,6 +693,73 @@
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 
+
+For:
+./hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js files)
+--------------------------------------------------------------------------------
+Copyright (c) 2011-2014 Novus Partners, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
+file except in compliance with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software distributed under the
+License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+express or implied. See the License for the specific language governing permissions and
+limitations under the License.
+
+
+
+For:
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/angular-nvd3-1.0.9.min.js
+--------------------------------------------------------------------------------
+The MIT License (MIT)
+Copyright (c) 2014 Konstantin Skipor
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+For:
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/angular-1.6.4.min.js
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/angular-route-1.6.4.min.js
+--------------------------------------------------------------------------------
+The MIT License
+
+Copyright (c) 2010-2017 Google, Inc. http://angularjs.org
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+
+
 For:
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
@@ -793,6 +860,7 @@
 
 For:
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/d3.v3.js
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/d3-3.5.17.min.js
 --------------------------------------------------------------------------------
 
 D3 is available under a 3-clause BSD license. For details, see:
diff --git a/dev-support/bin/dist-layout-stitching b/dev-support/bin/dist-layout-stitching
index f3db542..6557161 100755
--- a/dev-support/bin/dist-layout-stitching
+++ b/dev-support/bin/dist-layout-stitching
@@ -21,6 +21,9 @@
 # project.build.directory
 BASEDIR=$2
 
+#hdds.version
+HDDS_VERSION=$3
+
 function run()
 {
   declare res
@@ -132,7 +135,6 @@
 run copy "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-rbf/target/hadoop-hdfs-rbf-${VERSION}" .
 run copy "${ROOT}/hadoop-yarn-project/target/hadoop-yarn-project-${VERSION}" .
 run copy "${ROOT}/hadoop-mapreduce-project/target/hadoop-mapreduce-${VERSION}" .
-run copy "${ROOT}/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${VERSION}" .
 
 #copy httpfs and kms as is
 run cp -pr "${ROOT}/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${VERSION}"/* .
@@ -144,6 +146,24 @@
 run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-runtime/target/hadoop-client-runtime-${VERSION}.jar" share/hadoop/client/
 run cp -p "${ROOT}/hadoop-client-modules/hadoop-client-minicluster/target/hadoop-client-minicluster-${VERSION}.jar" share/hadoop/client/
 
+# HDDS
+run copy "${ROOT}/hadoop-hdds/common/target/hadoop-hdds-common-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-hdds/framework/target/hadoop-hdds-server-framework-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-hdds/server-scm/target/hadoop-hdds-server-scm-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-hdds/container-service/target/hadoop-hdds-container-service-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-hdds/client/target/hadoop-hdds-client-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-hdds/tools/target/hadoop-hdds-tools-${HDDS_VERSION}" .
+
+# Ozone
+run copy "${ROOT}/hadoop-ozone/common/target/hadoop-ozone-common-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-ozone/ozone-manager/target/hadoop-ozone-ozone-manager-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-ozone/objectstore-service/target/hadoop-ozone-objectstore-service-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-ozone/client/target/hadoop-ozone-client-${HDDS_VERSION}" .
+run copy "${ROOT}/hadoop-ozone/tools/target/hadoop-ozone-tools-${HDDS_VERSION}" .
+
+run copy "${ROOT}/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${VERSION}" .
+
+
 echo
 echo "Hadoop dist layout available at: ${BASEDIR}/hadoop-${VERSION}"
 echo
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index d6bf779..369c606 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -198,6 +198,9 @@
 RUN echo 'forrest.home=/opt/apache-forrest' > build.properties
 ENV FORREST_HOME=/opt/apache-forrest
 
+# Hugo static website generator (for new hadoop site and Ozone docs)
+RUN curl -L -o hugo.deb https://github.com/gohugoio/hugo/releases/download/v0.30.2/hugo_0.30.2_Linux-64bit.deb && dpkg --install hugo.deb && rm hugo.deb
+
 # Add a welcome message and environment checks.
 ADD hadoop_env_checks.sh /root/hadoop_env_checks.sh
 RUN chmod 755 /root/hadoop_env_checks.sh
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdsl.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdsl.xml
new file mode 100644
index 0000000..b1e039f
--- /dev/null
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdsl.xml
@@ -0,0 +1,56 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3 http://maven.apache.org/xsd/assembly-1.1.3.xsd">
+  <id>hadoop-src</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>true</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>.</directory>
+      <includes>
+        <include>LICENSE.txt</include>
+        <include>README.txt</include>
+        <include>NOTICE.txt</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>.</directory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>.git/**</exclude>
+        <exclude>**/.gitignore</exclude>
+        <exclude>**/.svn</exclude>
+        <exclude>**/*.iws</exclude>
+        <exclude>**/*.ipr</exclude>
+        <exclude>**/*.iml</exclude>
+        <exclude>**/.classpath</exclude>
+        <exclude>**/.project</exclude>
+        <exclude>**/.settings</exclude>
+        <exclude>**/target/**</exclude>
+        <!-- until the code that does this is fixed -->
+        <exclude>**/*.log</exclude>
+        <exclude>**/build/**</exclude>
+        <exclude>**/file:/**</exclude>
+        <exclude>**/SecurityAuth.audit*</exclude>
+      </excludes>
+    </fileSet>
+  </fileSets>
+</assembly>
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
index b1e039f..f0a8d44 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
@@ -50,6 +50,8 @@
         <exclude>**/build/**</exclude>
         <exclude>**/file:/**</exclude>
         <exclude>**/SecurityAuth.audit*</exclude>
+        <exclude>hadoop-ozone/**</exclude>
+        <exclude>hadoop-hdds/**</exclude>
       </excludes>
     </fileSet>
   </fileSets>
diff --git a/hadoop-client-modules/hadoop-client-check-test-invariants/src/test/resources/ensure-jars-have-correct-contents.sh b/hadoop-client-modules/hadoop-client-check-test-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
index fb9f4f9..f8c6a15 100644
--- a/hadoop-client-modules/hadoop-client-check-test-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
+++ b/hadoop-client-modules/hadoop-client-check-test-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
@@ -43,6 +43,12 @@
 #   * Used by JavaSandboxLinuxContainerRuntime as a default, loaded
 #     from root, so can't relocate. :(
 allowed_expr+="|^java.policy$"
+# * allow native libraries from rocksdb; the native libraries are left as-is (not relocated).
+allowed_expr+="|^librocksdbjni-linux32.so"
+allowed_expr+="|^librocksdbjni-linux64.so"
+allowed_expr+="|^librocksdbjni-osx.jnilib"
+allowed_expr+="|^librocksdbjni-win64.dll"
+allowed_expr+="|^librocksdbjni-linux-ppc64le.so"
 
 
 allowed_expr+=")"
diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
index a443648..da519a3 100644
--- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
+++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
@@ -647,6 +647,13 @@
                         <exclude>xml.xsd</exclude>
                       </excludes>
                     </filter>
+                    <!-- filtering HISTORY-JAVA.md from rocksdb jar -->
+                    <filter>
+                      <artifact>org.rocksdb:rocksdbjni</artifact>
+                      <excludes>
+                        <exclude>HISTORY-JAVA.md</exclude>
+                      </excludes>
+                    </filter>
                     <filter>
                       <!-- skip jetty license info already incorporated into LICENSE/NOTICE -->
                       <artifact>org.eclipse.jetty:*</artifact>
diff --git a/hadoop-client-modules/hadoop-client-runtime/pom.xml b/hadoop-client-modules/hadoop-client-runtime/pom.xml
index 363adf5..532fae9 100644
--- a/hadoop-client-modules/hadoop-client-runtime/pom.xml
+++ b/hadoop-client-modules/hadoop-client-runtime/pom.xml
@@ -157,6 +157,7 @@
                       <!-- Leave javax APIs that are stable -->
                       <!-- the jdk ships part of the javax.annotation namespace, so if we want to relocate this we'll have to care it out by class :( -->
                       <exclude>com.google.code.findbugs:jsr305</exclude>
+                      <exclude>io.dropwizard.metrics:metrics-core</exclude>
                     </excludes>
                   </artifactSet>
                   <filters>
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 9ef48b6..bee1430 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -596,6 +596,11 @@
   YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
   MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
   MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
+  HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
+  HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
+  OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
+  OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
+
   HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
   HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
   HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 24aacdf..6573a81 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -402,7 +402,24 @@
 # and therefore may override any similar flags set in HADOOP_OPTS
 #
 # export HDFS_DFSROUTER_OPTS=""
+
 ###
+# HDFS Key Space Manager specific parameters
+###
+# Specify the JVM options to be used when starting the HDFS Key Space Manager.
+# These options will be appended to the options specified as HADOOP_OPTS
+# and therefore may override any similar flags set in HADOOP_OPTS
+#
+# export HDFS_KSM_OPTS=""
+
+###
+# HDFS StorageContainerManager specific parameters
+###
+# Specify the JVM options to be used when starting the HDFS Storage Container Manager.
+# These options will be appended to the options specified as HADOOP_OPTS
+# and therefore may override any similar flags set in HADOOP_OPTS
+#
+# export HDFS_STORAGECONTAINERMANAGER_OPTS=""
 
 ###
 # Advanced Users Only!
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index 6933cf4..5783013 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -293,6 +293,40 @@
 log4j.appender.EWMA.messageAgeLimitSeconds=${yarn.ewma.messageAgeLimitSeconds}
 log4j.appender.EWMA.maxUniqueMessages=${yarn.ewma.maxUniqueMessages}
 
+
+# Fair scheduler requests log on state dump
+log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=DEBUG,FSLOGGER
+log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=false
+log4j.appender.FSLOGGER=org.apache.log4j.RollingFileAppender
+log4j.appender.FSLOGGER.File=${hadoop.log.dir}/fairscheduler-statedump.log
+log4j.appender.FSLOGGER.layout=org.apache.log4j.PatternLayout
+log4j.appender.FSLOGGER.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.FSLOGGER.MaxFileSize=${hadoop.log.maxfilesize}
+log4j.appender.FSLOGGER.MaxBackupIndex=${hadoop.log.maxbackupindex}
+
+#
+# Add a logger for ozone that is separate from the Datanode.
+#
+log4j.logger.org.apache.hadoop.ozone=DEBUG,OZONE,FILE
+
+# Do not log into the datanode logs. Remove this line to have a single log.
+log4j.additivity.org.apache.hadoop.ozone=false
+
+# For development purposes, log both to console and log file.
+log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
+log4j.appender.OZONE.Threshold=info
+log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
+log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+ %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
+
+# Real ozone logger that writes to ozone.log
+log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
+log4j.appender.FILE.Threshold=debug
+log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+(%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
+%m%n
 #
 # Fair scheduler state dump
 #
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index f32268b..fd72618 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -1685,6 +1685,19 @@
 </property>
 
 
+<!-- Ozone file system properties -->
+<property>
+  <name>fs.o3.impl</name>
+  <value>org.apache.hadoop.fs.ozone.OzoneFileSystem</value>
+  <description>The implementation class of the Ozone FileSystem.</description>
+</property>
+
+<property>
+  <name>fs.AbstractFileSystem.o3.impl</name>
+  <value>org.apache.hadoop.fs.ozone.OzFs</value>
+  <description>The implementation class of the OzFs AbstractFileSystem.</description>
+</property>
+
 <!-- ipc properties -->
 
 <property>
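
The two properties above only map the "o3" scheme to its implementation classes; a client still reaches the file system through the ordinary FileSystem API. A minimal sketch follows, assuming the Ozone file system jar is on the classpath and using a hypothetical o3 URI (the bucket/volume naming is not defined by this patch):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class O3SchemeSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // fs.o3.impl (above) resolves the "o3" scheme to OzoneFileSystem.
    // The authority "bucket.volume" is a hypothetical example, not taken from this patch.
    FileSystem fs = FileSystem.get(URI.create("o3://bucket.volume/"), conf);
    fs.mkdirs(new Path("/example"));
    fs.close();
  }
}
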
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
index 6ca9c78..023c831 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
@@ -101,6 +101,9 @@
     // S3A properties are in a different subtree.
     xmlPrefixToSkipCompare.add("fs.s3a.");
 
+    // O3 properties are in a different subtree.
+    xmlPrefixToSkipCompare.add("fs.o3.");
+
     //ftp properties are in a different subtree.
     // - org.apache.hadoop.fs.ftp.FTPFileSystem.
     xmlPrefixToSkipCompare.add("fs.ftp.impl");
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 8a0453f..f9b8573 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -68,6 +68,8 @@
       <artifactId>hadoop-client-integration-tests</artifactId>
       <scope>provided</scope>
     </dependency>
+
+
   </dependencies>
 
   <build>
@@ -132,6 +134,7 @@
       <!-- Disable the sign plugin, since there isn't anything to sign -->
       <plugin>
         <artifactId>maven-gpg-plugin</artifactId>
+        <version>${maven-gpg-plugin.version}</version>
         <executions>
           <execution>
             <id>sign-artifacts</id>
@@ -171,6 +174,7 @@
                     <argument>${basedir}/../dev-support/bin/dist-layout-stitching</argument>
                     <argument>${project.version}</argument>
                     <argument>${project.build.directory}</argument>
+                    <argument>${hdds.version}</argument>
                   </arguments>
                 </configuration>
               </execution>
@@ -214,6 +218,85 @@
         </plugins>
       </build>
     </profile>
+
+    <profile>
+      <id>hdds</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <dependencies>
+
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-ozone-ozone-manager</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdds-server-scm</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdds-tools</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdds-container-service</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-ozone-objectstore-service</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdds-tools</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-ozone-tools</artifactId>
+        </dependency>
+      </dependencies>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-resources-plugin</artifactId>
+              <executions>
+                <execution>
+                  <id>copy-docker-compose</id>
+                  <goals>
+                    <goal>copy-resources</goal>
+                  </goals>
+                  <phase>prepare-package</phase>
+                  <configuration>
+                    <outputDirectory>${project.build.directory}/compose</outputDirectory>
+                    <resources>
+                      <resource>
+                        <directory>src/main/compose</directory>
+                        <filtering>true</filtering>
+                      </resource>
+                    </resources>
+                  </configuration>
+                </execution>
+                <execution>
+                  <id>copy-dockerfile</id>
+                  <goals>
+                    <goal>copy-resources</goal>
+                  </goals>
+                  <phase>prepare-package</phase>
+                  <configuration>
+                    <outputDirectory>${project.build.directory}</outputDirectory>
+                    <resources>
+                      <resource>
+                        <directory>src/main/docker</directory>
+                        <filtering>true</filtering>
+                      </resource>
+                    </resources>
+                  </configuration>
+                </execution>
+              </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 
 </project>
diff --git a/hadoop-dist/src/main/compose/ozone/.env b/hadoop-dist/src/main/compose/ozone/.env
new file mode 100644
index 0000000..af20d3e
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozone/.env
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+VERSION=${project.version}
\ No newline at end of file
diff --git a/hadoop-dist/src/main/compose/ozone/docker-compose.yaml b/hadoop-dist/src/main/compose/ozone/docker-compose.yaml
new file mode 100644
index 0000000..13a7db6
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozone/docker-compose.yaml
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: "3"
+services:
+   namenode:
+      image: apache/hadoop-runner
+      hostname: namenode
+      volumes:
+         - ../..//hadoop-${VERSION}:/opt/hadoop
+      ports:
+         - 9870:9870
+      environment:
+          ENSURE_NAMENODE_DIR: /data/namenode
+      env_file:
+         - ./docker-config
+      command: ["/opt/hadoop/bin/hdfs","namenode"]
+   datanode:
+      image: apache/hadoop-runner
+      volumes:
+        - ../..//hadoop-${VERSION}:/opt/hadoop
+      ports:
+        - 9864
+      command: ["/opt/hadoop/bin/ozone","datanode"]
+      env_file:
+        - ./docker-config
+   ksm:
+      image: apache/hadoop-runner
+      volumes:
+         - ../..//hadoop-${VERSION}:/opt/hadoop
+      ports:
+         - 9874:9874
+      environment:
+         ENSURE_KSM_INITIALIZED: /data/metadata/ksm/current/VERSION
+      env_file:
+          - ./docker-config
+      command: ["/opt/hadoop/bin/ozone","ksm"]
+   scm:
+      image: apache/hadoop-runner
+      volumes:
+         - ../..//hadoop-${VERSION}:/opt/hadoop
+      ports:
+         - 9876:9876
+      env_file:
+          - ./docker-config
+      environment:
+          ENSURE_SCM_INITIALIZED: /data/metadata/scm/current/VERSION
+      command: ["/opt/hadoop/bin/ozone","scm"]
diff --git a/hadoop-dist/src/main/compose/ozone/docker-config b/hadoop-dist/src/main/compose/ozone/docker-config
new file mode 100644
index 0000000..c693db0
--- /dev/null
+++ b/hadoop-dist/src/main/compose/ozone/docker-config
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CORE-SITE.XML_fs.defaultFS=hdfs://namenode:9000
+OZONE-SITE.XML_ozone.ksm.address=ksm
+OZONE-SITE.XML_ozone.scm.names=scm
+OZONE-SITE.XML_ozone.enabled=True
+OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
+OZONE-SITE.XML_ozone.scm.block.client.address=scm
+OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
+OZONE-SITE.XML_ozone.handler.type=distributed
+OZONE-SITE.XML_ozone.scm.client.address=scm
+OZONE-SITE.XML_hdds.datanode.plugins=org.apache.hadoop.ozone.web.OzoneHddsDatanodeService
+HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
+HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
+HDFS-SITE.XML_rpc.metrics.quantile.enable=true
+HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.HddsDatanodeService
+LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
+LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
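
The keys in docker-config follow a FILENAME_key=value convention: everything before the first underscore names the target configuration file (ozone-site.xml, hdfs-site.xml, log4j.properties, ...), and the remainder is the property to inject. The expansion itself is expected to happen inside the apache/hadoop-runner image; the parser below only illustrates the convention, with a hypothetical class name:

import java.util.AbstractMap;
import java.util.Map;

public final class DockerConfigEntrySketch {
  // Splits one docker-config line into (file, [key, value]); illustration only.
  static Map.Entry<String, String[]> parse(String line) {
    int underscore = line.indexOf('_');
    int equals = line.indexOf('=', underscore);
    String file = line.substring(0, underscore).toLowerCase();   // e.g. "ozone-site.xml"
    String key = line.substring(underscore + 1, equals);         // e.g. "ozone.scm.names"
    String value = line.substring(equals + 1);                   // e.g. "scm"
    return new AbstractMap.SimpleEntry<>(file, new String[] {key, value});
  }

  public static void main(String[] args) {
    Map.Entry<String, String[]> e = parse("OZONE-SITE.XML_ozone.scm.names=scm");
    System.out.println(e.getKey() + ": " + e.getValue()[0] + " = " + e.getValue()[1]);
  }
}
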
diff --git a/hadoop-hdds/client/pom.xml b/hadoop-hdds/client/pom.xml
new file mode 100644
index 0000000..d2efec4
--- /dev/null
+++ b/hadoop-hdds/client/pom.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-hdds-client</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Distributed Data Store Client libraries</description>
+  <name>Apache HDDS Client</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-all</artifactId>
+    </dependency>
+
+  </dependencies>
+</project>
\ No newline at end of file
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
new file mode 100644
index 0000000..5c702c6
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.netty.bootstrap.Bootstrap;
+import io.netty.channel.Channel;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.nio.NioSocketChannel;
+import io.netty.handler.logging.LogLevel;
+import io.netty.handler.logging.LoggingHandler;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Semaphore;
+
+/**
+ * A Client for the storageContainer protocol.
+ */
+public class XceiverClient extends XceiverClientSpi {
+  static final Logger LOG = LoggerFactory.getLogger(XceiverClient.class);
+  private final Pipeline pipeline;
+  private final Configuration config;
+  private Channel channel;
+  private Bootstrap b;
+  private EventLoopGroup group;
+  private final Semaphore semaphore;
+
+  /**
+   * Constructs a client that can communicate with the Container framework on
+   * data nodes.
+   *
+   * @param pipeline - Pipeline that defines the machines.
+   * @param config -- Ozone Config
+   */
+  public XceiverClient(Pipeline pipeline, Configuration config) {
+    super();
+    Preconditions.checkNotNull(pipeline);
+    Preconditions.checkNotNull(config);
+    this.pipeline = pipeline;
+    this.config = config;
+    this.semaphore =
+        new Semaphore(HddsClientUtils.getMaxOutstandingRequests(config));
+  }
+
+  @Override
+  public void connect() throws Exception {
+    if (channel != null && channel.isActive()) {
+      throw new IOException("This client is already connected to a host.");
+    }
+
+    group = new NioEventLoopGroup();
+    b = new Bootstrap();
+    b.group(group)
+        .channel(NioSocketChannel.class)
+        .handler(new LoggingHandler(LogLevel.INFO))
+        .handler(new XceiverClientInitializer(this.pipeline, semaphore));
+    DatanodeDetails leader = this.pipeline.getLeader();
+
+    // Read the container port from the datanode; if it is unset, fall back
+    // to the configured default port.
+    int port = leader.getContainerPort();
+    if (port == 0) {
+      port = config.getInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT);
+    }
+    LOG.debug("Connecting to server Port : " + port);
+    channel = b.connect(leader.getHostName(), port).sync().channel();
+  }
+
+  /**
+   * Returns whether the Xceiver client is connected to a server.
+   *
+   * @return True if the connection is alive, false otherwise.
+   */
+  @VisibleForTesting
+  public boolean isConnected() {
+    return channel.isActive();
+  }
+
+  @Override
+  public void close() {
+    if (group != null) {
+      group.shutdownGracefully().awaitUninterruptibly();
+    }
+  }
+
+  @Override
+  public Pipeline getPipeline() {
+    return pipeline;
+  }
+
+  @Override
+  public ContainerProtos.ContainerCommandResponseProto sendCommand(
+      ContainerProtos.ContainerCommandRequestProto request) throws IOException {
+    try {
+      if ((channel == null) || (!channel.isActive())) {
+        throw new IOException("This channel is not connected.");
+      }
+      XceiverClientHandler handler =
+          channel.pipeline().get(XceiverClientHandler.class);
+
+      return handler.sendCommand(request);
+    } catch (ExecutionException | InterruptedException e) {
+      /**
+       * In case the netty channel handler throws an exception,
+       * the exception thrown will be wrapped within {@link ExecutionException}.
+       * Unwrapping here so that the original exception gets passed
+       * to the client.
+       */
+      if (e instanceof ExecutionException) {
+        Throwable cause = e.getCause();
+        if (cause instanceof IOException) {
+          throw (IOException) cause;
+        }
+      }
+      throw new IOException(
+          "Unexpected exception during execution:" + e.getMessage());
+    }
+  }
+
+  /**
+   * Sends a given command to the server and gets a waitable future back.
+   *
+   * @param request Request
+   * @return Response to the command
+   * @throws IOException
+   */
+  @Override
+  public CompletableFuture<ContainerProtos.ContainerCommandResponseProto>
+      sendCommandAsync(ContainerProtos.ContainerCommandRequestProto request)
+      throws IOException, ExecutionException, InterruptedException {
+    if ((channel == null) || (!channel.isActive())) {
+      throw new IOException("This channel is not connected.");
+    }
+    XceiverClientHandler handler =
+        channel.pipeline().get(XceiverClientHandler.class);
+    return handler.sendCommandAsync(request);
+  }
+
+  /**
+   * Create a pipeline.
+   *
+   * @param pipelineID - Name of the pipeline.
+   * @param datanodes - Datanodes
+   */
+  @Override
+  public void createPipeline(String pipelineID, List<DatanodeDetails> datanodes)
+      throws IOException {
+    // For a standalone pipeline, there is no notion of pipeline setup.
+    return;
+  }
+
+  /**
+   * Returns pipeline Type.
+   *
+   * @return - Stand Alone as the type.
+   */
+  @Override
+  public HddsProtos.ReplicationType getPipelineType() {
+    return HddsProtos.ReplicationType.STAND_ALONE;
+  }
+}
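
For readers of this patch, a minimal usage sketch of the class above: it assumes a Pipeline and a fully built ContainerCommandRequestProto are obtained elsewhere (for example from SCM client helpers), so the wrapper class and method names are hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.XceiverClient;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

public final class XceiverClientSketch {
  /** Connects to the pipeline leader, sends one command synchronously, then closes. */
  static ContainerProtos.ContainerCommandResponseProto sendOnce(
      Pipeline pipeline, Configuration conf,
      ContainerProtos.ContainerCommandRequestProto request) throws Exception {
    XceiverClient client = new XceiverClient(pipeline, conf);
    try {
      client.connect();                    // opens the Netty channel to the leader datanode
      return client.sendCommand(request);  // blocks until the response with the same traceID arrives
    } finally {
      client.close();                      // shuts down the event loop group
    }
  }
}
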
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java
new file mode 100644
index 0000000..e2b55ac
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.base.Preconditions;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Iterator;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.Semaphore;
+
+/**
+ * Netty client handler.
+ */
+public class XceiverClientHandler extends
+    SimpleChannelInboundHandler<ContainerCommandResponseProto> {
+
+  static final Logger LOG = LoggerFactory.getLogger(XceiverClientHandler.class);
+  private final ConcurrentMap<String, ResponseFuture> responses =
+      new ConcurrentHashMap<>();
+
+  private final Pipeline pipeline;
+  private volatile Channel channel;
+  private XceiverClientMetrics metrics;
+  private final Semaphore semaphore;
+
+  /**
+   * Constructs a client that can communicate to a container server.
+   */
+  public XceiverClientHandler(Pipeline pipeline, Semaphore semaphore) {
+    super(false);
+    Preconditions.checkNotNull(pipeline);
+    this.pipeline = pipeline;
+    this.metrics = XceiverClientManager.getXceiverClientMetrics();
+    this.semaphore = semaphore;
+  }
+
+  /**
+   * <strong>Please keep in mind that this method will be renamed to {@code
+   * messageReceived(ChannelHandlerContext, I)} in 5.0.</strong>
+   * <p>
+   * Is called for each message of type {@link ContainerProtos
+   * .ContainerCommandResponseProto}.
+   *
+   * @param ctx the {@link ChannelHandlerContext} which this {@link
+   * SimpleChannelInboundHandler} belongs to
+   * @param msg the message to handle
+   * @throws Exception is thrown if an error occurred
+   */
+  @Override
+  public void channelRead0(ChannelHandlerContext ctx,
+      ContainerProtos.ContainerCommandResponseProto msg)
+      throws Exception {
+    Preconditions.checkNotNull(msg);
+    metrics.decrPendingContainerOpsMetrics(msg.getCmdType());
+
+    String key = msg.getTraceID();
+    ResponseFuture response = responses.remove(key);
+    semaphore.release();
+
+    if (response != null) {
+      response.getFuture().complete(msg);
+
+      long requestTime = response.getRequestTime();
+      metrics.addContainerOpsLatency(msg.getCmdType(),
+          Time.monotonicNowNanos() - requestTime);
+    } else {
+      LOG.error("A reply received for message that was not queued. trace " +
+          "ID: {}", msg.getTraceID());
+    }
+  }
+
+  @Override
+  public void channelRegistered(ChannelHandlerContext ctx) {
+    LOG.debug("channelRegistered: Connected to ctx");
+    channel = ctx.channel();
+  }
+
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+    LOG.info("Exception in client " + cause.toString());
+    Iterator<String> keyIterator = responses.keySet().iterator();
+    while (keyIterator.hasNext()) {
+      ResponseFuture response = responses.remove(keyIterator.next());
+      response.getFuture().completeExceptionally(cause);
+      semaphore.release();
+    }
+    ctx.close();
+  }
+
+  /**
+   * Since Netty is async, we send a work request and then wait until a response
+   * appears in the reply queue. This is a simple sync interface for clients. We
+   * should consider building async interfaces for clients if this turns out to
+   * be a performance bottleneck.
+   *
+   * @param request - request.
+   * @return -- response
+   */
+
+  public ContainerCommandResponseProto sendCommand(
+      ContainerProtos.ContainerCommandRequestProto request)
+      throws ExecutionException, InterruptedException {
+    Future<ContainerCommandResponseProto> future = sendCommandAsync(request);
+    return future.get();
+  }
+
+  /**
+   * sendCommandAsync queues a command to the Netty subsystem and returns a
+   * CompletableFuture. This future is marked completed in channelRead0
+   * when the response comes back.
+   * @param request - Request to execute
+   * @return CompletableFuture
+   */
+  public CompletableFuture<ContainerCommandResponseProto> sendCommandAsync(
+      ContainerProtos.ContainerCommandRequestProto request)
+      throws InterruptedException {
+
+    // Throw an exception if the request doesn't have a traceID.
+    if (StringUtils.isEmpty(request.getTraceID())) {
+      throw new IllegalArgumentException("Invalid trace ID");
+    }
+
+    // The datanode ID must be set in the command so that we can distinguish
+    // commands when the cluster simulator is running.
+    if(!request.hasDatanodeUuid()) {
+      throw new IllegalArgumentException("Invalid Datanode ID");
+    }
+
+    metrics.incrPendingContainerOpsMetrics(request.getCmdType());
+
+    CompletableFuture<ContainerCommandResponseProto> future
+        = new CompletableFuture<>();
+    ResponseFuture response = new ResponseFuture(future,
+        Time.monotonicNowNanos());
+    semaphore.acquire();
+    ResponseFuture previous = responses.putIfAbsent(
+        request.getTraceID(), response);
+    if (previous != null) {
+      LOG.error("Command with Trace already exists. Ignoring this command. " +
+              "{}. Previous Command: {}", request.getTraceID(),
+          previous.toString());
+      throw new IllegalStateException("Duplicate trace ID. Command with this " +
+          "trace ID is already executing. Please ensure that " +
+          "trace IDs are not reused. ID: " + request.getTraceID());
+    }
+
+    channel.writeAndFlush(request);
+    return response.getFuture();
+  }
+
+  /**
+   * Class wraps response future info.
+   */
+  static class ResponseFuture {
+    private final long requestTime;
+    private final CompletableFuture<ContainerCommandResponseProto> future;
+
+    ResponseFuture(CompletableFuture<ContainerCommandResponseProto> future,
+        long requestTime) {
+      this.future = future;
+      this.requestTime = requestTime;
+    }
+
+    public long getRequestTime() {
+      return requestTime;
+    }
+
+    public CompletableFuture<ContainerCommandResponseProto> getFuture() {
+      return future;
+    }
+  }
+}
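
The handler correlates requests and responses by traceID, so the async path can be driven directly from a CompletableFuture. A short sketch under the same assumptions as above (hypothetical wrapper, client acquired elsewhere):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;

public final class AsyncCommandSketch {
  /** Fires a command without blocking; the future is completed in channelRead0 for its traceID. */
  static CompletableFuture<ContainerProtos.ContainerCommandResponseProto> fire(
      XceiverClientSpi client,
      ContainerProtos.ContainerCommandRequestProto request) throws Exception {
    // The request must carry a unique traceID and a datanode UUID; otherwise
    // sendCommandAsync rejects it (see the validation above).
    return client.sendCommandAsync(request)
        .whenComplete((response, error) -> {
          if (error != null) {
            // exceptionCaught completes all outstanding futures exceptionally.
            System.err.println("Command failed: " + error);
          }
        });
  }
}
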
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java
new file mode 100644
index 0000000..e10a9f6
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufEncoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+
+import java.util.concurrent.Semaphore;
+
+/**
+ * Setup the netty pipeline.
+ */
+public class XceiverClientInitializer extends
+    ChannelInitializer<SocketChannel> {
+  private final Pipeline pipeline;
+  private final Semaphore semaphore;
+
+  /**
+   * Constructs an Initializer for the client pipeline.
+   * @param pipeline  - Pipeline.
+   */
+  public XceiverClientInitializer(Pipeline pipeline, Semaphore semaphore) {
+    this.pipeline = pipeline;
+    this.semaphore = semaphore;
+  }
+
+  /**
+   * This method will be called once when the Channel is registered. After
+   * the method returns this instance will be removed from the
+   * ChannelPipeline of the Channel.
+   *
+   * @param ch   Channel which was registered.
+   * @throws Exception is thrown if an error occurs. In that case the
+   *                   Channel will be closed.
+   */
+  @Override
+  protected void initChannel(SocketChannel ch) throws Exception {
+    ChannelPipeline p = ch.pipeline();
+
+    p.addLast(new ProtobufVarint32FrameDecoder());
+    p.addLast(new ProtobufDecoder(ContainerProtos
+        .ContainerCommandResponseProto.getDefaultInstance()));
+
+    p.addLast(new ProtobufVarint32LengthFieldPrepender());
+    p.addLast(new ProtobufEncoder());
+
+    p.addLast(new XceiverClientHandler(this.pipeline, this.semaphore));
+
+  }
+}
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
new file mode 100644
index 0000000..7585104
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_MAX_SIZE_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos
+    .ReplicationType.RATIS;
+
+/**
+ * XceiverClientManager is responsible for the lifecycle of XceiverClient
+ * instances.  Callers use this class to acquire an XceiverClient instance
+ * connected to the desired container pipeline.  When done, the caller also uses
+ * this class to release the previously acquired XceiverClient instance.
+ *
+ *
+ * This class caches connections to containers for reuse, so that frequent
+ * accesses to the same container go through the same connection without
+ * re-establishing it. A connection is closed if it is not used for a period
+ * of time.
+ */
+public class XceiverClientManager implements Closeable {
+
+  //TODO : change this to SCM configuration class
+  private final Configuration conf;
+  private final Cache<String, XceiverClientSpi> clientCache;
+  private final boolean useRatis;
+
+  private static XceiverClientMetrics metrics;
+  /**
+   * Creates a new XceiverClientManager.
+   *
+   * @param conf configuration
+   */
+  public XceiverClientManager(Configuration conf) {
+    Preconditions.checkNotNull(conf);
+    int maxSize = conf.getInt(SCM_CONTAINER_CLIENT_MAX_SIZE_KEY,
+        SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT);
+    long staleThresholdMs = conf.getTimeDuration(
+        SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY,
+        SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT, TimeUnit.MILLISECONDS);
+    this.useRatis = conf.getBoolean(
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT);
+    this.conf = conf;
+    this.clientCache = CacheBuilder.newBuilder()
+        .expireAfterAccess(staleThresholdMs, TimeUnit.MILLISECONDS)
+        .maximumSize(maxSize)
+        .removalListener(
+            new RemovalListener<String, XceiverClientSpi>() {
+            @Override
+            public void onRemoval(
+                RemovalNotification<String, XceiverClientSpi>
+                  removalNotification) {
+              synchronized (clientCache) {
+                // Mark the entry as evicted
+                XceiverClientSpi info = removalNotification.getValue();
+                info.setEvicted();
+              }
+            }
+          }).build();
+  }
+
+  @VisibleForTesting
+  public Cache<String, XceiverClientSpi> getClientCache() {
+    return clientCache;
+  }
+
+  /**
+   * Acquires an XceiverClientSpi connected to a container capable of
+   * storing the specified key.
+   *
+   * If there is already a cached XceiverClientSpi, simply return the cached
+   * one; otherwise create a new one.
+   *
+   * @param pipeline the container pipeline for the client connection
+   * @return XceiverClientSpi connected to a container
+   * @throws IOException if an XceiverClientSpi cannot be acquired
+   */
+  public XceiverClientSpi acquireClient(Pipeline pipeline)
+      throws IOException {
+    Preconditions.checkNotNull(pipeline);
+    Preconditions.checkArgument(pipeline.getMachines() != null);
+    Preconditions.checkArgument(!pipeline.getMachines().isEmpty());
+
+    synchronized (clientCache) {
+      XceiverClientSpi info = getClient(pipeline);
+      info.incrementReference();
+      return info;
+    }
+  }
+
+  /**
+   * Releases an XceiverClientSpi after use.
+   *
+   * @param client client to release
+   */
+  public void releaseClient(XceiverClientSpi client) {
+    Preconditions.checkNotNull(client);
+    synchronized (clientCache) {
+      client.decrementReference();
+    }
+  }
+
+  private XceiverClientSpi getClient(Pipeline pipeline)
+      throws IOException {
+    String containerName = pipeline.getContainerName();
+    try {
+      return clientCache.get(containerName,
+          new Callable<XceiverClientSpi>() {
+          @Override
+          public XceiverClientSpi call() throws Exception {
+            XceiverClientSpi client = pipeline.getType() == RATIS ?
+                    XceiverClientRatis.newXceiverClientRatis(pipeline, conf)
+                    : new XceiverClient(pipeline, conf);
+            client.connect();
+            return client;
+          }
+        });
+    } catch (Exception e) {
+      throw new IOException(
+          "Exception getting XceiverClient: " + e.toString(), e);
+    }
+  }
+
+  /**
+   * Close and remove all the cached clients.
+   */
+  public void close() {
+    //closing is done through RemovalListener
+    clientCache.invalidateAll();
+    clientCache.cleanUp();
+
+    if (metrics != null) {
+      metrics.unRegister();
+    }
+  }
+
+  /**
+   * Tells us if Ratis is enabled for this cluster.
+   * @return True if Ratis is enabled.
+   */
+  public boolean isUseRatis() {
+    return useRatis;
+  }
+
+  /**
+   * Returns the replication factor: THREE when Ratis is enabled, ONE otherwise.
+   * @return the replication factor
+   */
+  public HddsProtos.ReplicationFactor getFactor() {
+    if(isUseRatis()) {
+      return HddsProtos.ReplicationFactor.THREE;
+    }
+    return HddsProtos.ReplicationFactor.ONE;
+  }
+
+  /**
+   * Returns the default replication type.
+   * @return Ratis or Standalone
+   */
+  public HddsProtos.ReplicationType getType() {
+    // TODO : Fix me and make Ratis default before release.
+    // TODO: Remove this as replication factor and type are pipeline properties
+    if(isUseRatis()) {
+      return HddsProtos.ReplicationType.RATIS;
+    }
+    return HddsProtos.ReplicationType.STAND_ALONE;
+  }
+
+  /**
+   * Get the xceiver client metrics.
+   */
+  public synchronized static XceiverClientMetrics getXceiverClientMetrics() {
+    if (metrics == null) {
+      metrics = XceiverClientMetrics.create();
+    }
+
+    return metrics;
+  }
+}
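
Callers are expected to go through the manager rather than construct clients directly, so that connections to the same container are shared and evicted by the cache. A minimal sketch, assuming a Pipeline obtained elsewhere and a hypothetical wrapper class:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.XceiverClientManager;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

public final class ClientManagerSketch {
  /** Borrows a cached client for the pipeline's container and returns it when done. */
  static void withClient(Configuration conf, Pipeline pipeline) throws IOException {
    try (XceiverClientManager manager = new XceiverClientManager(conf)) {
      XceiverClientSpi client = manager.acquireClient(pipeline);
      try {
        // ... issue container commands through the acquired client here ...
      } finally {
        manager.releaseClient(client);  // decrements the reference count; cache eviction closes it
      }
    }
  }
}
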
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java
new file mode 100644
index 0000000..a61eba1
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableRate;
+
+/**
+ * The client metrics for the Storage Container protocol.
+ */
+@InterfaceAudience.Private
+@Metrics(about = "Storage Container Client Metrics", context = "dfs")
+public class XceiverClientMetrics {
+  public static final String SOURCE_NAME = XceiverClientMetrics.class
+      .getSimpleName();
+
+  private @Metric MutableCounterLong pendingOps;
+  private MutableCounterLong[] pendingOpsArray;
+  private MutableRate[] containerOpsLatency;
+  private MetricsRegistry registry;
+
+  public XceiverClientMetrics() {
+    int numEnumEntries = ContainerProtos.Type.values().length;
+    this.registry = new MetricsRegistry(SOURCE_NAME);
+
+    this.pendingOpsArray = new MutableCounterLong[numEnumEntries];
+    this.containerOpsLatency = new MutableRate[numEnumEntries];
+    for (int i = 0; i < numEnumEntries; i++) {
+      pendingOpsArray[i] = registry.newCounter(
+          "numPending" + ContainerProtos.Type.valueOf(i + 1),
+          "number of pending" + ContainerProtos.Type.valueOf(i + 1) + " ops",
+          (long) 0);
+
+      containerOpsLatency[i] = registry.newRate(
+          ContainerProtos.Type.valueOf(i + 1) + "Latency",
+          "latency of " + ContainerProtos.Type.valueOf(i + 1)
+          + " ops");
+    }
+  }
+
+  public static XceiverClientMetrics create() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    return ms.register(SOURCE_NAME, "Storage Container Client Metrics",
+        new XceiverClientMetrics());
+  }
+
+  public void incrPendingContainerOpsMetrics(ContainerProtos.Type type) {
+    pendingOps.incr();
+    pendingOpsArray[type.ordinal()].incr();
+  }
+
+  public void decrPendingContainerOpsMetrics(ContainerProtos.Type type) {
+    pendingOps.incr(-1);
+    pendingOpsArray[type.ordinal()].incr(-1);
+  }
+
+  public void addContainerOpsLatency(ContainerProtos.Type type,
+      long latencyNanos) {
+    containerOpsLatency[type.ordinal()].add(latencyNanos);
+  }
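+
+  // Typical instrumentation pattern (illustrative; 'type' and 'startNanos'
+  // are assumed to be tracked by the caller around each container RPC):
+  //
+  //   metrics.incrPendingContainerOpsMetrics(type);
+  //   // ... send the request and wait for the response ...
+  //   metrics.decrPendingContainerOpsMetrics(type);
+  //   metrics.addContainerOpsLatency(type, System.nanoTime() - startNanos);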
+
+  public long getContainerOpsMetrics(ContainerProtos.Type type) {
+    return pendingOpsArray[type.ordinal()].value();
+  }
+
+  public void unRegister() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    ms.unregisterSource(SOURCE_NAME);
+  }
+}
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
new file mode 100644
index 0000000..d010c69
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.ratis.RatisHelper;
+import org.apache.ratis.client.RaftClient;
+import org.apache.ratis.protocol.RaftClientReply;
+import org.apache.ratis.protocol.RaftGroup;
+import org.apache.ratis.protocol.RaftPeer;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.rpc.SupportedRpcType;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * An implementation of {@link XceiverClientSpi} using Ratis.
+ * The underlying RPC mechanism can be chosen via the constructor.
+ */
+public final class XceiverClientRatis extends XceiverClientSpi {
+  static final Logger LOG = LoggerFactory.getLogger(XceiverClientRatis.class);
+
+  public static XceiverClientRatis newXceiverClientRatis(
+      Pipeline pipeline, Configuration ozoneConf) {
+    final String rpcType = ozoneConf.get(
+        ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY,
+        ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT);
+    final int maxOutstandingRequests =
+        HddsClientUtils.getMaxOutstandingRequests(ozoneConf);
+    return new XceiverClientRatis(pipeline,
+        SupportedRpcType.valueOfIgnoreCase(rpcType), maxOutstandingRequests);
+  }
+
+  private final Pipeline pipeline;
+  private final RpcType rpcType;
+  private final AtomicReference<RaftClient> client = new AtomicReference<>();
+  private final int maxOutstandingRequests;
+
+  /**
+   * Constructs a client.
+   */
+  private XceiverClientRatis(Pipeline pipeline, RpcType rpcType,
+      int maxOutStandingChunks) {
+    super();
+    this.pipeline = pipeline;
+    this.rpcType = rpcType;
+    this.maxOutstandingRequests = maxOutStandingChunks;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void createPipeline(String clusterId, List<DatanodeDetails> datanodes)
+      throws IOException {
+    RaftGroup group = RatisHelper.newRaftGroup(datanodes);
+    LOG.debug("initializing pipeline:{} with nodes:{}", clusterId,
+        group.getPeers());
+    reinitialize(datanodes, group);
+  }
+
+  /**
+   * Returns Ratis as the pipeline type.
+   *
+   * @return - Ratis
+   */
+  @Override
+  public HddsProtos.ReplicationType getPipelineType() {
+    return HddsProtos.ReplicationType.RATIS;
+  }
+
+  private void reinitialize(List<DatanodeDetails> datanodes, RaftGroup group)
+      throws IOException {
+    if (datanodes.isEmpty()) {
+      return;
+    }
+
+    IOException exception = null;
+    for (DatanodeDetails d : datanodes) {
+      try {
+        reinitialize(d, group);
+      } catch (IOException ioe) {
+        if (exception == null) {
+          exception = new IOException(
+              "Failed to reinitialize some of the RaftPeer(s)", ioe);
+        } else {
+          exception.addSuppressed(ioe);
+        }
+      }
+    }
+    if (exception != null) {
+      throw exception;
+    }
+  }
+
+  /**
+   * Adds a new peer to the Ratis ring.
+   *
+   * @param datanode - new datanode
+   * @param group    - Raft group
+   * @throws IOException - on Failure.
+   */
+  private void reinitialize(DatanodeDetails datanode, RaftGroup group)
+      throws IOException {
+    final RaftPeer p = RatisHelper.toRaftPeer(datanode);
+    try (RaftClient client = RatisHelper.newRaftClient(rpcType, p)) {
+      client.reinitialize(group, p.getId());
+    } catch (IOException ioe) {
+      LOG.error("Failed to reinitialize RaftPeer:{} datanode: {}  ",
+          p, datanode, ioe);
+      throw new IOException("Failed to reinitialize RaftPeer " + p
+          + "(datanode=" + datanode + ")", ioe);
+    }
+  }
+
+  @Override
+  public Pipeline getPipeline() {
+    return pipeline;
+  }
+
+  @Override
+  public void connect() throws Exception {
+    LOG.debug("Connecting to pipeline:{} leader:{}",
+        getPipeline().getPipelineName(),
+        RatisHelper.toRaftPeerId(pipeline.getLeader()));
+    // TODO : XceiverClient ratis should pass the config value of
+    // maxOutstandingRequests so as to set the upper bound on max no of async
+    // requests to be handled by raft client
+    if (!client.compareAndSet(null,
+        RatisHelper.newRaftClient(rpcType, getPipeline()))) {
+      throw new IllegalStateException("Client is already connected.");
+    }
+  }
+
+  @Override
+  public void close() {
+    final RaftClient c = client.getAndSet(null);
+    if (c != null) {
+      try {
+        c.close();
+      } catch (IOException e) {
+        throw new IllegalStateException(e);
+      }
+    }
+  }
+
+  private RaftClient getClient() {
+    return Objects.requireNonNull(client.get(), "client is null");
+  }
+
+  private boolean isReadOnly(ContainerCommandRequestProto proto) {
+    switch (proto.getCmdType()) {
+    case ReadContainer:
+    case ReadChunk:
+    case ListKey:
+    case GetKey:
+    case GetSmallFile:
+    case ListContainer:
+    case ListChunk:
+      return true;
+    case CloseContainer:
+    case WriteChunk:
+    case UpdateContainer:
+    case CompactChunk:
+    case CreateContainer:
+    case DeleteChunk:
+    case DeleteContainer:
+    case DeleteKey:
+    case PutKey:
+    case PutSmallFile:
+    default:
+      return false;
+    }
+  }
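+
+  // Note: read-only command types above are dispatched through
+  // RaftClient#sendReadOnly, while all other types go through
+  // RaftClient#send, as done in sendRequest and sendRequestAsync below.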
+
+  private RaftClientReply sendRequest(ContainerCommandRequestProto request)
+      throws IOException {
+    boolean isReadOnlyRequest = isReadOnly(request);
+    ByteString byteString =
+        ShadedProtoUtil.asShadedByteString(request.toByteArray());
+    LOG.debug("sendCommand {} {}", isReadOnlyRequest, request);
+    final RaftClientReply reply =  isReadOnlyRequest ?
+        getClient().sendReadOnly(() -> byteString) :
+        getClient().send(() -> byteString);
+    LOG.debug("reply {} {}", isReadOnlyRequest, reply);
+    return reply;
+  }
+
+  private CompletableFuture<RaftClientReply> sendRequestAsync(
+      ContainerCommandRequestProto request) throws IOException {
+    boolean isReadOnlyRequest = isReadOnly(request);
+    ByteString byteString =
+        ShadedProtoUtil.asShadedByteString(request.toByteArray());
+    LOG.debug("sendCommandAsync {} {}", isReadOnlyRequest, request);
+    return isReadOnlyRequest ? getClient().sendReadOnlyAsync(() -> byteString) :
+        getClient().sendAsync(() -> byteString);
+  }
+
+  @Override
+  public ContainerCommandResponseProto sendCommand(
+      ContainerCommandRequestProto request) throws IOException {
+    final RaftClientReply reply = sendRequest(request);
+    Preconditions.checkState(reply.isSuccess());
+    return ContainerCommandResponseProto.parseFrom(
+        ShadedProtoUtil.asByteString(reply.getMessage().getContent()));
+  }
+
+  /**
+   * Sends a given command to the server and gets a waitable future back.
+   *
+   * @param request Request
+   * @return Response to the command
+   * @throws IOException
+   */
+  @Override
+  public CompletableFuture<ContainerCommandResponseProto> sendCommandAsync(
+      ContainerCommandRequestProto request)
+      throws IOException, ExecutionException, InterruptedException {
+    return sendRequestAsync(request).whenComplete((reply, e) ->
+          LOG.debug("received reply {} for request: {} exception: {}", request,
+              reply, e))
+        .thenApply(reply -> {
+          try {
+            return ContainerCommandResponseProto.parseFrom(
+                ShadedProtoUtil.asByteString(reply.getMessage().getContent()));
+          } catch (InvalidProtocolBufferException e) {
+            throw new CompletionException(e);
+          }
+        });
+  }
+}
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java
new file mode 100644
index 0000000..8f30a7f
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.client;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState
+    .ALLOCATED;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState
+    .OPEN;
+
+/**
+ * This class provides the client-facing APIs of container operations.
+ */
+public class ContainerOperationClient implements ScmClient {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerOperationClient.class);
+  private static long containerSizeB = -1;
+  private final StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private final XceiverClientManager xceiverClientManager;
+
+  public ContainerOperationClient(
+      StorageContainerLocationProtocolClientSideTranslatorPB
+          storageContainerLocationClient,
+      XceiverClientManager xceiverClientManager) {
+    this.storageContainerLocationClient = storageContainerLocationClient;
+    this.xceiverClientManager = xceiverClientManager;
+  }
+
+  /**
+   * Return the capacity of containers. The current assumption is that all
+   * containers have the same capacity. Therefore one static value is
+   * sufficient for all containers.
+   * @return The capacity of one container in number of bytes.
+   */
+  public static long getContainerSizeB() {
+    return containerSizeB;
+  }
+
+  /**
+   * Set the capacity of containers. Should be called exactly once on
+   * system start.
+   * @param size Capacity of one container in number of bytes.
+   */
+  public static void setContainerSizeB(long size) {
+    containerSizeB = size;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Pipeline createContainer(String containerId, String owner)
+      throws IOException {
+    XceiverClientSpi client = null;
+    try {
+      Pipeline pipeline =
+          storageContainerLocationClient.allocateContainer(
+              xceiverClientManager.getType(),
+              xceiverClientManager.getFactor(), containerId, owner);
+      client = xceiverClientManager.acquireClient(pipeline);
+
+      // Allocated State means that SCM has allocated this pipeline in its
+      // namespace. The client needs to create the pipeline on the machines
+      // which were chosen by the SCM.
+      Preconditions.checkState(pipeline.getLifeCycleState() == ALLOCATED ||
+          pipeline.getLifeCycleState() == OPEN, "Unexpected pipeline state");
+      if (pipeline.getLifeCycleState() == ALLOCATED) {
+        createPipeline(client, pipeline);
+      }
+      // TODO : Container Client State needs to be updated.
+      // TODO : Return ContainerInfo instead of Pipeline
+      createContainer(containerId, client, pipeline);
+      return pipeline;
+    } finally {
+      if (client != null) {
+        xceiverClientManager.releaseClient(client);
+      }
+    }
+  }
+
+  /**
+   * Create a container over pipeline specified by the SCM.
+   *
+   * @param containerId - Container ID
+   * @param client - Client to communicate with Datanodes
+   * @param pipeline - A pipeline that is already created.
+   * @throws IOException
+   */
+  public void createContainer(String containerId, XceiverClientSpi client,
+      Pipeline pipeline) throws IOException {
+    String traceID = UUID.randomUUID().toString();
+    storageContainerLocationClient.notifyObjectStageChange(
+        ObjectStageChangeRequestProto.Type.container,
+        containerId,
+        ObjectStageChangeRequestProto.Op.create,
+        ObjectStageChangeRequestProto.Stage.begin);
+    ContainerProtocolCalls.createContainer(client, traceID);
+    storageContainerLocationClient.notifyObjectStageChange(
+        ObjectStageChangeRequestProto.Type.container,
+        containerId,
+        ObjectStageChangeRequestProto.Op.create,
+        ObjectStageChangeRequestProto.Stage.complete);
+
+    // Let us log this info after we let SCM know that we have completed the
+    // creation state.
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Created container " + containerId
+          + " leader:" + pipeline.getLeader()
+          + " machines:" + pipeline.getMachines());
+    }
+  }
+
+  /**
+   * Creates a pipeline over the machines chosen by the SCM.
+   *
+   * @param client - Client
+   * @param pipeline - pipeline to be created on Datanodes.
+   * @throws IOException
+   */
+  private void createPipeline(XceiverClientSpi client, Pipeline pipeline)
+      throws IOException {
+
+    Preconditions.checkNotNull(pipeline.getPipelineName(), "Pipeline " +
+        "name cannot be null when client create flag is set.");
+
+    // Pipeline creation is a three step process.
+    //
+    // 1. Notify SCM that this client is doing a create pipeline on
+    // datanodes.
+    //
+    // 2. Talk to Datanodes to create the pipeline.
+    //
+    // 3. Update SCM that pipeline creation was successful.
+    storageContainerLocationClient.notifyObjectStageChange(
+        ObjectStageChangeRequestProto.Type.pipeline,
+        pipeline.getPipelineName(),
+        ObjectStageChangeRequestProto.Op.create,
+        ObjectStageChangeRequestProto.Stage.begin);
+
+    client.createPipeline(pipeline.getPipelineName(),
+        pipeline.getMachines());
+
+    storageContainerLocationClient.notifyObjectStageChange(
+        ObjectStageChangeRequestProto.Type.pipeline,
+        pipeline.getPipelineName(),
+        ObjectStageChangeRequestProto.Op.create,
+        ObjectStageChangeRequestProto.Stage.complete);
+
+    // TODO : Should we change the state on the client side ??
+    // That makes sense, but it is not needed for the client to work.
+    LOG.debug("Pipeline creation successful. Pipeline: {}",
+        pipeline.toString());
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Pipeline createContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor,
+      String containerId, String owner) throws IOException {
+    XceiverClientSpi client = null;
+    try {
+      // allocate container on SCM.
+      Pipeline pipeline =
+          storageContainerLocationClient.allocateContainer(type, factor,
+              containerId, owner);
+      client = xceiverClientManager.acquireClient(pipeline);
+
+      // Allocated State means that SCM has allocated this pipeline in its
+      // namespace. The client needs to create the pipeline on the machines
+      // which were chosen by the SCM.
+      if (pipeline.getLifeCycleState() == ALLOCATED) {
+        createPipeline(client, pipeline);
+      }
+
+      // TODO : Return ContainerInfo instead of Pipeline
+      // Connect to the pipeline leader and allocate the container on the
+      // leader datanode, reusing the client acquired above; acquiring a
+      // second reference would never be released in the finally block.
+      createContainer(containerId, client, pipeline);
+      return pipeline;
+    } finally {
+      if (client != null) {
+        xceiverClientManager.releaseClient(client);
+      }
+    }
+  }
+
+  /**
+   * Returns a set of nodes that meet the query criteria.
+   *
+   * @param nodeStatuses - A set of criteria that we want the node to have.
+   * @param queryScope - Query scope - Cluster or pool.
+   * @param poolName - if it is pool, a pool name is required.
+   * @return A set of nodes that meet the requested criteria.
+   * @throws IOException
+   */
+  @Override
+  public HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState>
+      nodeStatuses, HddsProtos.QueryScope queryScope, String poolName)
+      throws IOException {
+    return storageContainerLocationClient.queryNode(nodeStatuses, queryScope,
+        poolName);
+  }
+
+  /**
+   * Creates a specified replication pipeline.
+   */
+  @Override
+  public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
+      throws IOException {
+    return storageContainerLocationClient.createReplicationPipeline(type,
+        factor, nodePool);
+  }
+
+  /**
+   * Deletes the container; this releases any resources it uses.
+   * @param pipeline - Pipeline that represents the container.
+   * @param force - True to forcibly delete the container.
+   * @throws IOException
+   */
+  @Override
+  public void deleteContainer(Pipeline pipeline, boolean force)
+      throws IOException {
+    XceiverClientSpi client = null;
+    try {
+      client = xceiverClientManager.acquireClient(pipeline);
+      String traceID = UUID.randomUUID().toString();
+      ContainerProtocolCalls.deleteContainer(client, force, traceID);
+      storageContainerLocationClient
+          .deleteContainer(pipeline.getContainerName());
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Deleted container {}, leader: {}, machines: {} ",
+            pipeline.getContainerName(),
+            pipeline.getLeader(),
+            pipeline.getMachines());
+      }
+    } finally {
+      if (client != null) {
+        xceiverClientManager.releaseClient(client);
+      }
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<ContainerInfo> listContainer(String startName,
+      String prefixName, int count)
+      throws IOException {
+    return storageContainerLocationClient.listContainer(
+        startName, prefixName, count);
+  }
+
+  /**
+   * Get meta data from an existing container.
+   *
+   * @param pipeline - pipeline that represents the container.
+   * @return ContainerData - a protobuf message containing basic info
+   * about the container.
+   * @throws IOException
+   */
+  @Override
+  public ContainerData readContainer(Pipeline pipeline) throws IOException {
+    XceiverClientSpi client = null;
+    try {
+      client = xceiverClientManager.acquireClient(pipeline);
+      String traceID = UUID.randomUUID().toString();
+      ReadContainerResponseProto response =
+          ContainerProtocolCalls.readContainer(client,
+              pipeline.getContainerName(), traceID);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Read container {}, leader: {}, machines: {} ",
+            pipeline.getContainerName(),
+            pipeline.getLeader(),
+            pipeline.getMachines());
+      }
+      return response.getContainerData();
+    } finally {
+      if (client != null) {
+        xceiverClientManager.releaseClient(client);
+      }
+    }
+  }
+
+  /**
+   * Given an id, return the pipeline associated with the container.
+   * @param containerId - String Container ID
+   * @return Pipeline of the existing container, corresponding to the given id.
+   * @throws IOException
+   */
+  @Override
+  public Pipeline getContainer(String containerId) throws
+      IOException {
+    return storageContainerLocationClient.getContainer(containerId);
+  }
+
+  /**
+   * Close a container.
+   *
+   * @param pipeline the pipeline of the container to be closed.
+   * @throws IOException
+   */
+  @Override
+  public void closeContainer(Pipeline pipeline) throws IOException {
+    XceiverClientSpi client = null;
+    try {
+      LOG.debug("Close container {}", pipeline);
+      /*
+      TODO: two orders here, revisit this later:
+      1. close on SCM first, then on data node
+      2. close on data node first, then on SCM
+
+      with 1: if the client fails after closing on SCM, there is a container
+      that SCM thinks is closed but is actually open. SCM will no longer
+      allocate blocks to it, which is fine. But SCM may later try to
+      replicate this "closed" container, which I'm not sure is safe.
+
+      with 2: if the client fails after closing on the datanode, there is a
+      container that SCM thinks is open but is actually closed. SCM will
+      still try to allocate blocks to it, which will fail when actually
+      doing the write. No more data can be written, but at least the
+      correctness and consistency of existing data will be maintained.
+
+      For now, take the #2 way.
+       */
+      // Actually close the container on Datanode
+      client = xceiverClientManager.acquireClient(pipeline);
+      String traceID = UUID.randomUUID().toString();
+
+      String containerId = pipeline.getContainerName();
+
+      storageContainerLocationClient.notifyObjectStageChange(
+          ObjectStageChangeRequestProto.Type.container,
+          containerId,
+          ObjectStageChangeRequestProto.Op.close,
+          ObjectStageChangeRequestProto.Stage.begin);
+
+      ContainerProtocolCalls.closeContainer(client, traceID);
+      // Notify SCM to close the container
+      storageContainerLocationClient.notifyObjectStageChange(
+          ObjectStageChangeRequestProto.Type.container,
+          containerId,
+          ObjectStageChangeRequestProto.Op.close,
+          ObjectStageChangeRequestProto.Stage.complete);
+    } finally {
+      if (client != null) {
+        xceiverClientManager.releaseClient(client);
+      }
+    }
+  }
+
+  /**
+   * Get the current usage information.
+   * @param pipeline - Pipeline
+   * @return the size of the given container.
+   * @throws IOException
+   */
+  @Override
+  public long getContainerSize(Pipeline pipeline) throws IOException {
+    // TODO : Pipeline can be null, handle it correctly.
+    long size = getContainerSizeB();
+    if (size == -1) {
+      throw new IOException("Container size unknown!");
+    }
+    return size;
+  }
+}
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
new file mode 100644
index 0000000..bc5f8d6
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.client;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.ParseException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Utility methods for Ozone and Container Clients.
+ *
+ * The methods to retrieve SCM service endpoints assume there is a single
+ * SCM service instance. This will change when we switch to replicated service
+ * instances for redundancy.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public final class HddsClientUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      HddsClientUtils.class);
+
+  private static final int NO_PORT = -1;
+
+  private HddsClientUtils() {
+  }
+
+  /**
+   * Date format used in Ozone; wrapped in a ThreadLocal for thread safety.
+   */
+  private static final ThreadLocal<DateTimeFormatter> DATE_FORMAT =
+      ThreadLocal.withInitial(() -> {
+        DateTimeFormatter format =
+            DateTimeFormatter.ofPattern(OzoneConsts.OZONE_DATE_FORMAT);
+        return format.withZone(ZoneId.of(OzoneConsts.OZONE_TIME_ZONE));
+      });
+
+
+  /**
+   * Convert time in millisecond to a human readable format required in ozone.
+   * @return a human readable string for the input time
+   */
+  public static String formatDateTime(long millis) {
+    ZonedDateTime dateTime = ZonedDateTime.ofInstant(
+        Instant.ofEpochMilli(millis), DATE_FORMAT.get().getZone());
+    return DATE_FORMAT.get().format(dateTime);
+  }
+
+  /**
+   * Convert time in ozone date format to millisecond.
+   * @return time in milliseconds
+   */
+  public static long formatDateTime(String date) throws ParseException {
+    Preconditions.checkNotNull(date, "Date string should not be null.");
+    return ZonedDateTime.parse(date, DATE_FORMAT.get())
+        .toInstant().toEpochMilli();
+  }
+
+
+
+  /**
+   * Verifies that a bucket or volume name is a valid DNS name.
+   *
+   * @param resName Bucket or volume Name to be validated
+   *
+   * @throws IllegalArgumentException
+   */
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
+
+    if (resName == null) {
+      throw new IllegalArgumentException("Bucket or Volume name is null");
+    }
+
+    if ((resName.length() < OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH) ||
+        (resName.length() > OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH)) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume length is illegal, " +
+              "valid length is 3-63 characters");
+    }
+
+    if ((resName.charAt(0) == '.') || (resName.charAt(0) == '-')) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot start with a period or dash");
+    }
+
+    if ((resName.charAt(resName.length() - 1) == '.') ||
+        (resName.charAt(resName.length() - 1) == '-')) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot end with a period or dash");
+    }
+
+    boolean isIPv4 = true;
+    char prev = (char) 0;
+
+    for (int index = 0; index < resName.length(); index++) {
+      char currChar = resName.charAt(index);
+
+      if (currChar != '.') {
+        isIPv4 = ((currChar >= '0') && (currChar <= '9')) && isIPv4;
+      }
+
+      if (currChar >= 'A' && currChar <= 'Z') {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name does not support uppercase characters");
+      }
+
+      if ((currChar != '.') && (currChar != '-')) {
+        if ((currChar < '0') || (currChar > '9' && currChar < 'a') ||
+            (currChar > 'z')) {
+          throw new IllegalArgumentException("Bucket or Volume name has an " +
+              "unsupported character : " +
+              currChar);
+        }
+      }
+
+      if ((prev == '.') && (currChar == '.')) {
+        throw new IllegalArgumentException("Bucket or Volume name should not " +
+            "have two contiguous periods");
+      }
+
+      if ((prev == '-') && (currChar == '.')) {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name should not have period after dash");
+      }
+
+      if ((prev == '.') && (currChar == '-')) {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name should not have dash after period");
+      }
+      prev = currChar;
+    }
+
+    if (isIPv4) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot be an IPv4 address or all numeric");
+    }
+  }
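+
+  // Examples (illustrative, assuming the 3-63 character limits above):
+  // "bucket-1" and "my.volume" are accepted, while "Bucket" (uppercase),
+  // "192.168.0.1" (IPv4-like), "a..b" (contiguous periods) and "-abc"
+  // (leading dash) are rejected.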
+
+  /**
+   * Returns the cache value to be used for list calls.
+   * @param conf Configuration object
+   * @return list cache size
+   */
+  public static int getListCacheSize(Configuration conf) {
+    return conf.getInt(OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE,
+        OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT);
+  }
+
+  /**
+   * @return a default instance of {@link CloseableHttpClient}.
+   */
+  public static CloseableHttpClient newHttpClient() {
+    return HddsClientUtils.newHttpClient(new Configuration());
+  }
+
+  /**
+   * Returns a {@link CloseableHttpClient} configured by given configuration.
+   * If conf is null, returns a default instance.
+   *
+   * @param conf configuration
+   * @return a {@link CloseableHttpClient} instance.
+   */
+  public static CloseableHttpClient newHttpClient(Configuration conf) {
+    long socketTimeout = OzoneConfigKeys
+        .OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT;
+    long connectionTimeout = OzoneConfigKeys
+        .OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT;
+    if (conf != null) {
+      socketTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+      connectionTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+    }
+
+    CloseableHttpClient client = HttpClients.custom()
+        .setDefaultRequestConfig(
+            RequestConfig.custom()
+                .setSocketTimeout(Math.toIntExact(socketTimeout))
+                .setConnectTimeout(Math.toIntExact(connectionTimeout))
+                .build())
+        .build();
+    return client;
+  }
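+
+  // Example (illustrative): timeouts can be tuned through the configuration
+  // before building the client, e.g.
+  //
+  //   Configuration conf = new Configuration();
+  //   conf.setTimeDuration(OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT,
+  //       10, TimeUnit.SECONDS);
+  //   CloseableHttpClient httpClient = HddsClientUtils.newHttpClient(conf);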
+
+  /**
+   * Returns the maximum number of outstanding async requests to be handled
+   * by the Standalone and Ratis clients.
+   */
+  public static int getMaxOutstandingRequests(Configuration config) {
+    return config
+        .getInt(ScmConfigKeys.SCM_CONTAINER_CLIENT_MAX_OUTSTANDING_REQUESTS,
+            ScmConfigKeys
+                .SCM_CONTAINER_CLIENT_MAX_OUTSTANDING_REQUESTS_DEFAULT);
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
new file mode 100644
index 0000000..73ad78c
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.client;
+
+/**
+ * Client facing classes for the container operations.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..9390bc1
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+/**
+ * Classes for different type of container service client.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
new file mode 100644
index 0000000..9b8eaa9
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.storage;
+
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadChunkResponseProto;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * An {@link InputStream} used by the REST service in combination with the
+ * SCMClient to read the value of a key from a sequence
+ * of container chunks.  All bytes of the key value are stored in container
+ * chunks.  Each chunk may contain multiple underlying {@link ByteBuffer}
+ * instances.  This class encapsulates all state management for iterating
+ * through the sequence of chunks and the sequence of buffers within each chunk.
+ */
+public class ChunkInputStream extends InputStream implements Seekable {
+
+  private static final int EOF = -1;
+
+  private final String key;
+  private final String traceID;
+  private XceiverClientManager xceiverClientManager;
+  private XceiverClientSpi xceiverClient;
+  private List<ChunkInfo> chunks;
+  private int chunkIndex;
+  private long[] chunkOffset;
+  private List<ByteBuffer> buffers;
+  private int bufferIndex;
+
+  /**
+   * Creates a new ChunkInputStream.
+   *
+   * @param key chunk key
+   * @param xceiverClientManager client manager that controls client
+   * @param xceiverClient client to perform container calls
+   * @param chunks list of chunks to read
+   * @param traceID container protocol call traceID
+   */
+  public ChunkInputStream(String key, XceiverClientManager xceiverClientManager,
+      XceiverClientSpi xceiverClient, List<ChunkInfo> chunks, String traceID) {
+    this.key = key;
+    this.traceID = traceID;
+    this.xceiverClientManager = xceiverClientManager;
+    this.xceiverClient = xceiverClient;
+    this.chunks = chunks;
+    this.chunkIndex = -1;
+    // chunkOffset[i] stores offset at which chunk i stores data in
+    // ChunkInputStream
+    this.chunkOffset = new long[this.chunks.size()];
+    initializeChunkOffset();
+    this.buffers = null;
+    this.bufferIndex = 0;
+  }
+
+  private void initializeChunkOffset() {
+    long tempOffset = 0;
+    for (int i = 0; i < chunks.size(); i++) {
+      chunkOffset[i] = tempOffset;
+      tempOffset += chunks.get(i).getLen();
+    }
+  }
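+
+  // Example layout (illustrative): for chunk lengths {4, 4, 2} the
+  // chunkOffset array becomes {0, 4, 8}, i.e. chunkOffset[i] is the
+  // position of chunk i within the overall key value; seek(pos) binary
+  // searches this array to locate the chunk containing 'pos'.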
+
+  @Override
+  public synchronized int read()
+      throws IOException {
+    checkOpen();
+    int available = prepareRead(1);
+    return available == EOF ? EOF :
+        Byte.toUnsignedInt(buffers.get(bufferIndex).get());
+  }
+
+  @Override
+  public synchronized int read(byte[] b, int off, int len) throws IOException {
+    // According to the JavaDocs for InputStream, it is recommended that
+    // subclasses provide an override of bulk read if possible for performance
+    // reasons.  In addition to performance, we need to do it for correctness
+    // reasons.  The Ozone REST service uses PipedInputStream and
+    // PipedOutputStream to relay HTTP response data between a Jersey thread and
+    // a Netty thread.  It turns out that PipedInputStream/PipedOutputStream
+    // have a subtle dependency (bug?) on the wrapped stream providing separate
+    // implementations of single-byte read and bulk read.  Without this, get key
+    // responses might close the connection before writing all of the bytes
+    // advertised in the Content-Length.
+    if (b == null) {
+      throw new NullPointerException();
+    }
+    if (off < 0 || len < 0 || len > b.length - off) {
+      throw new IndexOutOfBoundsException();
+    }
+    if (len == 0) {
+      return 0;
+    }
+    checkOpen();
+    int available = prepareRead(len);
+    if (available == EOF) {
+      return EOF;
+    }
+    buffers.get(bufferIndex).get(b, off, available);
+    return available;
+  }
+
+  @Override
+  public synchronized void close() {
+    if (xceiverClientManager != null && xceiverClient != null) {
+      xceiverClientManager.releaseClient(xceiverClient);
+      xceiverClientManager = null;
+      xceiverClient = null;
+    }
+  }
+
+  /**
+   * Checks if the stream is open.  If not, throws an exception.
+   *
+   * @throws IOException if stream is closed
+   */
+  private synchronized void checkOpen() throws IOException {
+    if (xceiverClient == null) {
+      throw new IOException("ChunkInputStream has been closed.");
+    }
+  }
+
+  /**
+   * Prepares to read by advancing through chunks and buffers as needed until it
+   * finds data to return or encounters EOF.
+   *
+   * @param len desired length of data to read
+   * @return length of data available to read, possibly less than desired length
+   */
+  private synchronized int prepareRead(int len) throws IOException {
+    for (;;) {
+      if (chunks == null || chunks.isEmpty()) {
+        // This must be an empty key.
+        return EOF;
+      } else if (buffers == null) {
+        // The first read triggers fetching the first chunk.
+        readChunkFromContainer();
+      } else if (!buffers.isEmpty() &&
+          buffers.get(bufferIndex).hasRemaining()) {
+        // Data is available from the current buffer.
+        ByteBuffer bb = buffers.get(bufferIndex);
+        return len > bb.remaining() ? bb.remaining() : len;
+      } else if (!buffers.isEmpty() &&
+          !buffers.get(bufferIndex).hasRemaining() &&
+          bufferIndex < buffers.size() - 1) {
+        // There are additional buffers available.
+        ++bufferIndex;
+      } else if (chunkIndex < chunks.size() - 1) {
+        // There are additional chunks available.
+        readChunkFromContainer();
+      } else {
+        // All available input has been consumed.
+        return EOF;
+      }
+    }
+  }
+
+  /**
+   * Attempts to read the chunk at the specified offset in the chunk list.  If
+   * successful, then the data of the read chunk is saved so that its bytes can
+   * be returned from subsequent read calls.
+   *
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  private synchronized void readChunkFromContainer() throws IOException {
+    // On every chunk read chunkIndex should be increased so as to read the
+    // next chunk
+    chunkIndex += 1;
+    final ReadChunkResponseProto readChunkResponse;
+    try {
+      readChunkResponse = ContainerProtocolCalls.readChunk(xceiverClient,
+          chunks.get(chunkIndex), key, traceID);
+    } catch (IOException e) {
+      throw new IOException("Unexpected OzoneException: " + e.toString(), e);
+    }
+    ByteString byteString = readChunkResponse.getData();
+    buffers = byteString.asReadOnlyByteBufferList();
+    bufferIndex = 0;
+  }
+
+  @Override
+  public synchronized void seek(long pos) throws IOException {
+    if (pos < 0 || (chunks.size() == 0 && pos > 0)
+        || pos >= chunkOffset[chunks.size() - 1] + chunks.get(chunks.size() - 1)
+        .getLen()) {
+      throw new EOFException(
+          "EOF encountered pos: " + pos + " container key: " + key);
+    }
+    if (chunkIndex == -1) {
+      chunkIndex = Arrays.binarySearch(chunkOffset, pos);
+    } else if (pos < chunkOffset[chunkIndex]) {
+      chunkIndex = Arrays.binarySearch(chunkOffset, 0, chunkIndex, pos);
+    } else if (pos >= chunkOffset[chunkIndex] + chunks.get(chunkIndex)
+        .getLen()) {
+      chunkIndex =
+          Arrays.binarySearch(chunkOffset, chunkIndex + 1, chunks.size(), pos);
+    }
+    if (chunkIndex < 0) {
+      // Binary search returns -insertionPoint - 1  if element is not present
+      // in the array. insertionPoint is the point at which element would be
+      // inserted in the sorted array. We need to adjust the chunkIndex
+      // accordingly so that chunkIndex = insertionPoint - 1
+      chunkIndex = -chunkIndex - 2;
+    }
+    // adjust chunkIndex so that readChunkFromContainer reads the correct chunk
+    chunkIndex -= 1;
+    readChunkFromContainer();
+    adjustBufferIndex(pos);
+  }
+
+  private void adjustBufferIndex(long pos) {
+    long tempOffset = chunkOffset[chunkIndex];
+    for (int i = 0; i < buffers.size(); i++) {
+      if (pos - tempOffset >= buffers.get(i).capacity()) {
+        tempOffset += buffers.get(i).capacity();
+      } else {
+        bufferIndex = i;
+        break;
+      }
+    }
+    buffers.get(bufferIndex).position((int) (pos - tempOffset));
+  }
+
+  @Override
+  public synchronized long getPos() throws IOException {
+    return chunkIndex == -1 ? 0 :
+        chunkOffset[chunkIndex] + buffers.get(bufferIndex).position();
+  }
+
+  @Override
+  public boolean seekToNewSource(long targetPos) throws IOException {
+    return false;
+  }
+}
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
new file mode 100644
index 0000000..b65df9f
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.storage;
+
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls.putKey;
+import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls
+    .writeChunk;
+
+/**
+ * An {@link OutputStream} used by the REST service in combination with the
+ * SCMClient to write the value of a key to a sequence
+ * of container chunks.  Writes are buffered locally and periodically written to
+ * the container as a new chunk.  In order to preserve the semantics that
+ * replacement of a pre-existing key is atomic, each instance of the stream has
+ * an internal unique identifier.  This unique identifier and a monotonically
+ * increasing chunk index form a composite key that is used as the chunk name.
+ * After all data is written, a putKey call creates or updates the corresponding
+ * container key, and this call includes the full list of chunks that make up
+ * the key data.  The list of chunks is updated all at once.  Therefore, a
+ * concurrent reader can never see an intermediate state in which different
+ * chunks of data from different versions of the key data are interleaved.
+ * This class encapsulates all state management for buffering and writing
+ * through to the container.
+ */
+public class ChunkOutputStream extends OutputStream {
+
+  private final String containerKey;
+  private final String key;
+  private final String traceID;
+  private final KeyData.Builder containerKeyData;
+  private XceiverClientManager xceiverClientManager;
+  private XceiverClientSpi xceiverClient;
+  private ByteBuffer buffer;
+  private final String streamId;
+  private int chunkIndex;
+  private int chunkSize;
+
+  /**
+   * Creates a new ChunkOutputStream.
+   *
+   * @param containerKey container key
+   * @param key chunk key
+   * @param xceiverClientManager client manager that controls client
+   * @param xceiverClient client to perform container calls
+   * @param traceID container protocol call args
+   * @param chunkSize chunk size
+   */
+  public ChunkOutputStream(String containerKey, String key,
+      XceiverClientManager xceiverClientManager, XceiverClientSpi xceiverClient,
+      String traceID, int chunkSize) {
+    this.containerKey = containerKey;
+    this.key = key;
+    this.traceID = traceID;
+    this.chunkSize = chunkSize;
+    KeyValue keyValue = KeyValue.newBuilder()
+        .setKey("TYPE").setValue("KEY").build();
+    this.containerKeyData = KeyData.newBuilder()
+        .setContainerName(xceiverClient.getPipeline().getContainerName())
+        .setName(containerKey)
+        .addMetadata(keyValue);
+    this.xceiverClientManager = xceiverClientManager;
+    this.xceiverClient = xceiverClient;
+    this.buffer = ByteBuffer.allocate(chunkSize);
+    this.streamId = UUID.randomUUID().toString();
+    this.chunkIndex = 0;
+  }
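+
+  // Usage sketch (illustrative; assumes the xceiver client was acquired from
+  // an XceiverClientManager and chunkSize comes from configuration):
+  //
+  //   try (ChunkOutputStream out = new ChunkOutputStream(containerKey, key,
+  //       manager, client, traceID, chunkSize)) {
+  //     out.write(data);
+  //   } // close() writes any buffered data and issues the final putKey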
+
+  @Override
+  public synchronized void write(int b) throws IOException {
+    checkOpen();
+    int rollbackPosition = buffer.position();
+    int rollbackLimit = buffer.limit();
+    buffer.put((byte)b);
+    if (buffer.position() == chunkSize) {
+      flushBufferToChunk(rollbackPosition, rollbackLimit);
+    }
+  }
+
+  @Override
+  public void write(byte[] b, int off, int len) throws IOException {
+    if (b == null) {
+      throw new NullPointerException();
+    }
+    if ((off < 0) || (off > b.length) || (len < 0) ||
+        ((off + len) > b.length) || ((off + len) < 0)) {
+      throw new IndexOutOfBoundsException();
+    }
+    if (len == 0) {
+      return;
+    }
+    checkOpen();
+    while (len > 0) {
+      int writeLen = Math.min(chunkSize - buffer.position(), len);
+      int rollbackPosition = buffer.position();
+      int rollbackLimit = buffer.limit();
+      buffer.put(b, off, writeLen);
+      if (buffer.position() == chunkSize) {
+        flushBufferToChunk(rollbackPosition, rollbackLimit);
+      }
+      off += writeLen;
+      len -= writeLen;
+    }
+  }
+
+  @Override
+  public synchronized void flush() throws IOException {
+    checkOpen();
+    if (buffer.position() > 0) {
+      int rollbackPosition = buffer.position();
+      int rollbackLimit = buffer.limit();
+      flushBufferToChunk(rollbackPosition, rollbackLimit);
+    }
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    if (xceiverClientManager != null && xceiverClient != null &&
+        buffer != null) {
+      try {
+        if (buffer.position() > 0) {
+          writeChunkToContainer();
+        }
+        putKey(xceiverClient, containerKeyData.build(), traceID);
+      } catch (IOException e) {
+        throw new IOException(
+            "Unexpected Storage Container Exception: " + e.toString(), e);
+      } finally {
+        xceiverClientManager.releaseClient(xceiverClient);
+        xceiverClientManager = null;
+        xceiverClient = null;
+        buffer = null;
+      }
+    }
+
+  }
+
+  /**
+   * Checks if the stream is open.  If not, throws an exception.
+   *
+   * @throws IOException if stream is closed
+   */
+  private synchronized void checkOpen() throws IOException {
+    if (xceiverClient == null) {
+      throw new IOException("ChunkOutputStream has been closed.");
+    }
+  }
+
+  /**
+   * Attempts to flush buffered writes by writing a new chunk to the container.
+   * If successful, then clears the buffer to prepare to receive writes for a
+   * new chunk.
+   *
+   * @param rollbackPosition position to restore in buffer if write fails
+   * @param rollbackLimit limit to restore in buffer if write fails
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  private synchronized void flushBufferToChunk(int rollbackPosition,
+      int rollbackLimit) throws IOException {
+    boolean success = false;
+    try {
+      writeChunkToContainer();
+      success = true;
+    } finally {
+      if (success) {
+        buffer.clear();
+      } else {
+        buffer.position(rollbackPosition);
+        buffer.limit(rollbackLimit);
+      }
+    }
+  }
+
+  /**
+   * Writes buffered data as a new chunk to the container and saves chunk
+   * information to be used later in putKey call.
+   *
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  private synchronized void writeChunkToContainer() throws IOException {
+    buffer.flip();
+    ByteString data = ByteString.copyFrom(buffer);
+    ChunkInfo chunk = ChunkInfo
+        .newBuilder()
+        .setChunkName(
+            DigestUtils.md5Hex(key) + "_stream_"
+                + streamId + "_chunk_" + ++chunkIndex)
+        .setOffset(0)
+        .setLen(data.size())
+        .build();
+    try {
+      writeChunk(xceiverClient, chunk, key, data, traceID);
+    } catch (IOException e) {
+      throw new IOException(
+          "Unexpected Storage Container Exception: " + e.toString(), e);
+    }
+    containerKeyData.addChunks(chunk);
+  }
+}
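
A minimal usage sketch for the ChunkOutputStream added above, assuming the stream has already been constructed by higher-level client code with a live XceiverClientManager/XceiverClientSpi pair; the wrapper class and helper name are hypothetical.

import java.io.IOException;
import org.apache.hadoop.hdds.scm.storage.ChunkOutputStream;

final class ChunkOutputStreamSketch {
  /** Hypothetical helper: writes a byte[] payload through an already-constructed stream. */
  static void writePayload(ChunkOutputStream out, byte[] payload) throws IOException {
    try {
      // Bytes are buffered locally; whenever the buffer reaches chunkSize a chunk
      // is written to the container and the buffer is cleared.
      out.write(payload, 0, payload.length);
      // Force any partially filled chunk out to the container.
      out.flush();
    } finally {
      // close() writes remaining buffered data, issues putKey, and releases the client.
      out.close();
    }
  }
}
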
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
new file mode 100644
index 0000000..6e7ce94
--- /dev/null
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.storage;
+
+/**
+ * Low-level I/O streams to upload/download chunks from the container service.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
new file mode 100644
index 0000000..3571a89
--- /dev/null
+++ b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,21 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <Package name="org.apache.hadoop.hdds.protocol.proto"/>
+  </Match>
+</FindBugsFilter>
diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml
new file mode 100644
index 0000000..b81da96
--- /dev/null
+++ b/hadoop-hdds/common/pom.xml
@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-hdds-common</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Distributed Data Store Common</description>
+  <name>Apache HDDS Common</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.fusesource.leveldbjni</groupId>
+      <artifactId>leveldbjni-all</artifactId>
+    </dependency>
+
+    <dependency>
+      <artifactId>ratis-server</artifactId>
+      <groupId>org.apache.ratis</groupId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.dropwizard.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <artifactId>ratis-netty</artifactId>
+      <groupId>org.apache.ratis</groupId>
+    </dependency>
+    <dependency>
+      <artifactId>ratis-grpc</artifactId>
+      <groupId>org.apache.ratis</groupId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.rocksdb</groupId>
+      <artifactId>rocksdbjni</artifactId>
+      <version>5.8.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <goals>
+              <goal>protoc</goal>
+            </goals>
+            <configuration>
+              <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
+              <imports>
+                <param>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
+                </param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>StorageContainerLocationProtocol.proto</include>
+                  <include>DatanodeContainerProtocol.proto</include>
+                  <include>hdds.proto</include>
+                  <include>ScmBlockLocationProtocol.proto</include>
+                </includes>
+              </source>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
new file mode 100644
index 0000000..dec2c1c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds;
+
+public final class HddsConfigKeys {
+  private HddsConfigKeys() {
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
new file mode 100644
index 0000000..48c6dce
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -0,0 +1,318 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.net.HostAndPort;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.net.DNS;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.HashSet;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys
+    .DFS_DATANODE_DNS_INTERFACE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys
+    .DFS_DATANODE_DNS_NAMESERVER_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED_DEFAULT;
+
+/**
+ * HDDS-specific stateless utility functions.
+ */
+public final class HddsUtils {
+
+
+  private static final Logger LOG = LoggerFactory.getLogger(HddsUtils.class);
+
+  /**
+   * The service ID of the solitary Ozone SCM service.
+   */
+  public static final String OZONE_SCM_SERVICE_ID = "OzoneScmService";
+  public static final String OZONE_SCM_SERVICE_INSTANCE_ID =
+      "OzoneScmServiceInstance";
+
+  private static final int NO_PORT = -1;
+
+  private HddsUtils() {
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the SCM.
+   *
+   * @param conf configuration
+   * @return Target InetSocketAddress for the SCM client endpoint.
+   */
+  public static InetSocketAddress getScmAddressForClients(Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+
+    if (!host.isPresent()) {
+      throw new IllegalArgumentException(
+          ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY + " must be defined. See"
+              + " https://wiki.apache.org/hadoop/Ozone#Configuration for "
+              + "details"
+              + " on configuring Ozone.");
+    }
+
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(host.get() + ":" + port
+        .or(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT));
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the SCM for block service. If
+   * {@link ScmConfigKeys#OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY} is not defined
+   * then {@link ScmConfigKeys#OZONE_SCM_CLIENT_ADDRESS_KEY} is used.
+   *
+   * @param conf configuration
+   * @return Target InetSocketAddress for the SCM block client endpoint.
+   * @throws IllegalArgumentException if configuration is not defined.
+   */
+  public static InetSocketAddress getScmAddressForBlockClients(
+      Configuration conf) {
+    Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY);
+
+    if (!host.isPresent()) {
+      host = getHostNameFromConfigKeys(conf,
+          ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+      if (!host.isPresent()) {
+        throw new IllegalArgumentException(
+            ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY
+                + " must be defined. See"
+                + " https://wiki.apache.org/hadoop/Ozone#Configuration"
+                + " for details on configuring Ozone.");
+      }
+    }
+
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(host.get() + ":" + port
+        .or(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_PORT_DEFAULT));
+  }
+
+  /**
+   * Retrieve the hostname, trying the supplied config keys in order.
+   * Each config value may be absent, or, if present, in the format
+   * host:port (the :port part is optional).
+   *
+   * @param conf configuration
+   * @param keys a list of configuration key names.
+   *
+   * @return first hostname component found from the given keys, or absent.
+   * @throws IllegalArgumentException if any values are not in the 'host'
+   *             or host:port format.
+   */
+  public static Optional<String> getHostNameFromConfigKeys(Configuration conf,
+      String... keys) {
+    for (final String key : keys) {
+      final String value = conf.getTrimmed(key);
+      final Optional<String> hostName = getHostName(value);
+      if (hostName.isPresent()) {
+        return hostName;
+      }
+    }
+    return Optional.absent();
+  }
+
+  /**
+   * Gets the hostname, or indicates that it is absent.
+   * @param value host or host:port
+   * @return hostname
+   */
+  public static Optional<String> getHostName(String value) {
+    if ((value == null) || value.isEmpty()) {
+      return Optional.absent();
+    }
+    return Optional.of(HostAndPort.fromString(value).getHostText());
+  }
+
+  /**
+   * Gets the port if there is one, or absent otherwise.
+   * @param value String in host:port format.
+   * @return port number, or absent if no port is specified
+   */
+  public static Optional<Integer> getHostPort(String value) {
+    if ((value == null) || value.isEmpty()) {
+      return Optional.absent();
+    }
+    int port = HostAndPort.fromString(value).getPortOrDefault(NO_PORT);
+    if (port == NO_PORT) {
+      return Optional.absent();
+    } else {
+      return Optional.of(port);
+    }
+  }
+
+  /**
+   * Retrieve the port number, trying the supplied config keys in order.
+   * Each config value may be absent, or, if present, in the format
+   * host:port (the :port part is optional).
+   *
+   * @param conf configuration
+   * @param keys a list of configuration key names.
+   *
+   * @return first port number component found from the given keys, or absent.
+   * @throws IllegalArgumentException if any values are not in the 'host'
+   *             or host:port format.
+   */
+  public static Optional<Integer> getPortNumberFromConfigKeys(
+      Configuration conf, String... keys) {
+    for (final String key : keys) {
+      final String value = conf.getTrimmed(key);
+      final Optional<Integer> hostPort = getHostPort(value);
+      if (hostPort.isPresent()) {
+        return hostPort;
+      }
+    }
+    return Optional.absent();
+  }
+
+  /**
+   * Retrieve the socket addresses of all storage container managers.
+   *
+   * @param conf configuration
+   * @return A collection of SCM addresses
+   * @throws IllegalArgumentException If the configuration is invalid
+   */
+  public static Collection<InetSocketAddress> getSCMAddresses(
+      Configuration conf) throws IllegalArgumentException {
+    Collection<InetSocketAddress> addresses =
+        new HashSet<InetSocketAddress>();
+    Collection<String> names =
+        conf.getTrimmedStringCollection(ScmConfigKeys.OZONE_SCM_NAMES);
+    if (names == null || names.isEmpty()) {
+      throw new IllegalArgumentException(ScmConfigKeys.OZONE_SCM_NAMES
+          + " need to be a set of valid DNS names or IP addresses."
+          + " Null or empty address list found.");
+    }
+
+    final com.google.common.base.Optional<Integer>
+        defaultPort =  com.google.common.base.Optional.of(ScmConfigKeys
+        .OZONE_SCM_DEFAULT_PORT);
+    for (String address : names) {
+      com.google.common.base.Optional<String> hostname =
+          getHostName(address);
+      if (!hostname.isPresent()) {
+        throw new IllegalArgumentException("Invalid hostname for SCM: "
+            + hostname);
+      }
+      com.google.common.base.Optional<Integer> port =
+          getHostPort(address);
+      InetSocketAddress addr = NetUtils.createSocketAddr(hostname.get(),
+          port.or(defaultPort.get()));
+      addresses.add(addr);
+    }
+    return addresses;
+  }
+
+  public static boolean isHddsEnabled(Configuration conf) {
+    String securityEnabled =
+        conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+            "simple");
+    boolean securityAuthorizationEnabled = conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false);
+
+    if (securityEnabled.equals("kerberos") || securityAuthorizationEnabled) {
+      LOG.error("Ozone is not supported in a security-enabled cluster.");
+      return false;
+    } else {
+      return conf.getBoolean(OZONE_ENABLED, OZONE_ENABLED_DEFAULT);
+    }
+  }
+
+
+  /**
+   * Get the path for datanode id file.
+   *
+   * @param conf - Configuration
+   * @return the path of datanode id as string
+   */
+  public static String getDatanodeIdFilePath(Configuration conf) {
+    String dataNodeIDPath = conf.get(ScmConfigKeys.OZONE_SCM_DATANODE_ID);
+    if (dataNodeIDPath == null) {
+      String metaPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
+      if (Strings.isNullOrEmpty(metaPath)) {
+        // This means the metadata directory is not configured; in theory this
+        // should not happen because startup should have failed earlier.
+        throw new IllegalArgumentException("Unable to locate meta data " +
+            "directory when getting datanode id path");
+      }
+      dataNodeIDPath = Paths.get(metaPath,
+          ScmConfigKeys.OZONE_SCM_DATANODE_ID_PATH_DEFAULT).toString();
+    }
+    return dataNodeIDPath;
+  }
+
+  /**
+   * Returns the hostname for this datanode. If the hostname is not
+   * explicitly configured in the given config, then it is determined
+   * via the DNS class.
+   *
+   * @param conf Configuration
+   *
+   * @return the hostname (NB: may not be a FQDN)
+   * @throws UnknownHostException if the dfs.datanode.dns.interface
+   *    option is used and the hostname can not be determined
+   */
+  public static String getHostName(Configuration conf)
+      throws UnknownHostException {
+    String name = conf.get(DFS_DATANODE_HOST_NAME_KEY);
+    if (name == null) {
+      String dnsInterface = conf.get(
+          CommonConfigurationKeys.HADOOP_SECURITY_DNS_INTERFACE_KEY);
+      String nameServer = conf.get(
+          CommonConfigurationKeys.HADOOP_SECURITY_DNS_NAMESERVER_KEY);
+      boolean fallbackToHosts = false;
+
+      if (dnsInterface == null) {
+        // Try the legacy configuration keys.
+        dnsInterface = conf.get(DFS_DATANODE_DNS_INTERFACE_KEY);
+        nameServer = conf.get(DFS_DATANODE_DNS_NAMESERVER_KEY);
+      } else {
+        // If HADOOP_SECURITY_DNS_* is set then also attempt hosts file
+        // resolution if DNS fails. We will not use hosts file resolution
+        // by default to avoid breaking existing clusters.
+        fallbackToHosts = true;
+      }
+
+      name = DNS.getDefaultHost(dnsInterface, nameServer, fallbackToHosts);
+    }
+    return name;
+  }
+
+}
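
A short sketch of how the address helpers above can be used to locate the SCM endpoints from configuration; the host name and port are placeholder values, not defaults taken from the source, and the wrapper class is hypothetical.

import java.net.InetSocketAddress;
import java.util.Collection;
import org.apache.hadoop.hdds.HddsUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;

final class ScmAddressSketch {
  static void locateScmEndpoints() {
    OzoneConfiguration conf = new OzoneConfiguration();
    // Example values only; any host[:port] string is accepted and the port
    // falls back to the configured default when omitted.
    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "scm.example.com:9860");
    conf.set(ScmConfigKeys.OZONE_SCM_NAMES, "scm.example.com");

    InetSocketAddress clientAddr = HddsUtils.getScmAddressForClients(conf);
    // Block clients fall back to the client address when no dedicated key is set.
    InetSocketAddress blockAddr = HddsUtils.getScmAddressForBlockClients(conf);
    Collection<InetSocketAddress> allScms = HddsUtils.getSCMAddresses(conf);
  }
}
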
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java
new file mode 100644
index 0000000..59708a9
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java
@@ -0,0 +1,203 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.client;
+
+import org.apache.hadoop.ozone.OzoneConsts;
+
+
+/**
+ * Represents an OzoneQuota object that can be applied to
+ * a storage volume.
+ */
+public class OzoneQuota {
+
+  public static final String OZONE_QUOTA_BYTES = "BYTES";
+  public static final String OZONE_QUOTA_MB = "MB";
+  public static final String OZONE_QUOTA_GB = "GB";
+  public static final String OZONE_QUOTA_TB = "TB";
+
+  private Units unit;
+  private long size;
+
+  /** Quota Units.*/
+  public enum Units {UNDEFINED, BYTES, KB, MB, GB, TB}
+
+  /**
+   * Returns size.
+   *
+   * @return long
+   */
+  public long getSize() {
+    return size;
+  }
+
+  /**
+   * Returns Units.
+   *
+   * @return Unit in MB, GB or TB
+   */
+  public Units getUnit() {
+    return unit;
+  }
+
+  /**
+   * Constructs a default Quota object.
+   */
+  public OzoneQuota() {
+    this.size = 0;
+    this.unit = Units.UNDEFINED;
+  }
+
+  /**
+   * Constructor for Ozone Quota.
+   *
+   * @param size Long Size
+   * @param unit MB, GB  or TB
+   */
+  public OzoneQuota(long size, Units unit) {
+    this.size = size;
+    this.unit = unit;
+  }
+
+  /**
+   * Formats a quota as a string.
+   *
+   * @param quota the quota to format
+   * @return string representation of quota
+   */
+  public static String formatQuota(OzoneQuota quota) {
+    return String.valueOf(quota.size) + quota.unit;
+  }
+
+  /**
+   * Parses a user-provided string and returns the
+   * quota object.
+   *
+   * @param quotaString Quota String
+   *
+   * @return OzoneQuota object
+   *
+   * @throws IllegalArgumentException
+   */
+  public static OzoneQuota parseQuota(String quotaString)
+      throws IllegalArgumentException {
+
+    if ((quotaString == null) || (quotaString.isEmpty())) {
+      throw new IllegalArgumentException(
+          "Quota string cannot be null or empty.");
+    }
+
+    String uppercase = quotaString.toUpperCase().replaceAll("\\s+", "");
+    String size = "";
+    int nSize;
+    Units currUnit = Units.MB;
+    boolean found = false;
+    if (uppercase.endsWith(OZONE_QUOTA_MB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - OZONE_QUOTA_MB.length());
+      currUnit = Units.MB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(OZONE_QUOTA_GB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - OZONE_QUOTA_GB.length());
+      currUnit = Units.GB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(OZONE_QUOTA_TB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - OZONE_QUOTA_TB.length());
+      currUnit = Units.TB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(OZONE_QUOTA_BYTES)) {
+      size = uppercase
+          .substring(0, uppercase.length() - OZONE_QUOTA_BYTES.length());
+      currUnit = Units.BYTES;
+      found = true;
+    }
+
+    if (!found) {
+      throw new IllegalArgumentException(
+          "Quota unit not recognized. Supported values are BYTES, MB, GB and " +
+              "TB.");
+    }
+
+    nSize = Integer.parseInt(size);
+    if (nSize < 0) {
+      throw new IllegalArgumentException("Quota cannot be negative.");
+    }
+
+    return new OzoneQuota(nSize, currUnit);
+  }
+
+
+  /**
+   * Returns size in Bytes or -1 if there is no Quota.
+   */
+  public long sizeInBytes() {
+    switch (this.unit) {
+    case BYTES:
+      return this.getSize();
+    case MB:
+      return this.getSize() * OzoneConsts.MB;
+    case GB:
+      return this.getSize() * OzoneConsts.GB;
+    case TB:
+      return this.getSize() * OzoneConsts.TB;
+    case UNDEFINED:
+    default:
+      return -1;
+    }
+  }
+
+  /**
+   * Returns OzoneQuota corresponding to size in bytes.
+   *
+   * @param sizeInBytes size in bytes to be converted
+   *
+   * @return OzoneQuota object
+   */
+  public static OzoneQuota getOzoneQuota(long sizeInBytes) {
+    long size;
+    Units unit;
+    if (sizeInBytes % OzoneConsts.TB == 0) {
+      size = sizeInBytes / OzoneConsts.TB;
+      unit = Units.TB;
+    } else if (sizeInBytes % OzoneConsts.GB == 0) {
+      size = sizeInBytes / OzoneConsts.GB;
+      unit = Units.GB;
+    } else if (sizeInBytes % OzoneConsts.MB == 0) {
+      size = sizeInBytes / OzoneConsts.MB;
+      unit = Units.MB;
+    } else {
+      size = sizeInBytes;
+      unit = Units.BYTES;
+    }
+    return new OzoneQuota((int)size, unit);
+  }
+
+  @Override
+  public String toString() {
+    return size + " " + unit;
+  }
+}
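
A small, self-contained sketch of the quota parsing round trip implemented above; the literal quota string is illustrative only and the wrapper class is hypothetical.

import org.apache.hadoop.hdds.client.OzoneQuota;

final class OzoneQuotaSketch {
  static void quotaRoundTrip() {
    // "10 gb" is upper-cased and stripped of whitespace, parsing to size 10 with Units.GB.
    OzoneQuota quota = OzoneQuota.parseQuota("10 gb");
    long bytes = quota.sizeInBytes();                       // 10 * OzoneConsts.GB
    // Converting back picks the largest unit that divides the byte count evenly.
    OzoneQuota roundTrip = OzoneQuota.getOzoneQuota(bytes); // 10 GB again
    String formatted = OzoneQuota.formatQuota(roundTrip);   // "10GB"
  }
}
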
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java
new file mode 100644
index 0000000..0215964
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.client;
+
+/**
+ * The replication factor to be used while writing key into ozone.
+ */
+public enum ReplicationFactor {
+  ONE(1),
+  THREE(3);
+
+  /**
+   * Integer representation of replication.
+   */
+  private int value;
+
+  /**
+   * Initializes ReplicationFactor with value.
+   * @param value replication value
+   */
+  ReplicationFactor(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Returns enum value corresponding to the int value.
+   * @param value replication value
+   * @return ReplicationFactor
+   */
+  public static ReplicationFactor valueOf(int value) {
+    if(value == 1) {
+      return ONE;
+    }
+    if (value == 3) {
+      return THREE;
+    }
+    throw new IllegalArgumentException("Unsupported value: " + value);
+  }
+
+  /**
+   * Returns integer representation of ReplicationFactor.
+   * @return replication value
+   */
+  public int getValue() {
+    return value;
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java
new file mode 100644
index 0000000..259a1a2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.client;
+
+/**
+ * The replication type to be used while writing key into ozone.
+ */
+public enum ReplicationType {
+    RATIS,
+    STAND_ALONE,
+    CHAINED
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java
new file mode 100644
index 0000000..e81f134
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.client;
+
+/**
+ * Base property types for HDDS containers and replication.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/HddsConfServlet.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/HddsConfServlet.java
new file mode 100644
index 0000000..b8d0b24
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/HddsConfServlet.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.conf;
+
+import com.google.gson.Gson;
+import java.io.IOException;
+import java.io.Writer;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.HttpHeaders;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpServer2;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A servlet to print out the running configuration data.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Unstable
+public class HddsConfServlet extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  protected static final String FORMAT_JSON = "json";
+  protected static final String FORMAT_XML = "xml";
+  private static final String COMMAND = "cmd";
+  private static final OzoneConfiguration OZONE_CONFIG =
+      new OzoneConfiguration();
+  private static final transient Logger LOG =
+      LoggerFactory.getLogger(HddsConfServlet.class);
+
+
+  /**
+   * Return the Configuration of the daemon hosting this servlet.
+   * This is populated when the HttpServer starts.
+   */
+  private Configuration getConfFromContext() {
+    Configuration conf = (Configuration) getServletContext().getAttribute(
+        HttpServer2.CONF_CONTEXT_ATTRIBUTE);
+    assert conf != null;
+    return conf;
+  }
+
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+
+    if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
+        request, response)) {
+      return;
+    }
+
+    String format = parseAcceptHeader(request);
+    if (FORMAT_XML.equals(format)) {
+      response.setContentType("text/xml; charset=utf-8");
+    } else if (FORMAT_JSON.equals(format)) {
+      response.setContentType("application/json; charset=utf-8");
+    }
+
+    String name = request.getParameter("name");
+    Writer out = response.getWriter();
+    String cmd = request.getParameter(COMMAND);
+
+    processCommand(cmd, format, request, response, out, name);
+    out.close();
+  }
+
+  private void processCommand(String cmd, String format,
+      HttpServletRequest request, HttpServletResponse response, Writer out,
+      String name)
+      throws IOException {
+    try {
+      if (cmd == null) {
+        if (FORMAT_XML.equals(format)) {
+          response.setContentType("text/xml; charset=utf-8");
+        } else if (FORMAT_JSON.equals(format)) {
+          response.setContentType("application/json; charset=utf-8");
+        }
+
+        writeResponse(getConfFromContext(), out, format, name);
+      } else {
+        processConfigTagRequest(request, out);
+      }
+    } catch (BadFormatException bfe) {
+      response.sendError(HttpServletResponse.SC_BAD_REQUEST, bfe.getMessage());
+    } catch (IllegalArgumentException iae) {
+      response.sendError(HttpServletResponse.SC_NOT_FOUND, iae.getMessage());
+    }
+  }
+
+  @VisibleForTesting
+  static String parseAcceptHeader(HttpServletRequest request) {
+    String format = request.getHeader(HttpHeaders.ACCEPT);
+    return format != null && format.contains(FORMAT_JSON) ?
+        FORMAT_JSON : FORMAT_XML;
+  }
+
+  /**
+   * Guts of the servlet - extracted for easy testing.
+   */
+  static void writeResponse(Configuration conf,
+      Writer out, String format, String propertyName)
+      throws IOException, IllegalArgumentException, BadFormatException {
+    if (FORMAT_JSON.equals(format)) {
+      Configuration.dumpConfiguration(conf, propertyName, out);
+    } else if (FORMAT_XML.equals(format)) {
+      conf.writeXml(propertyName, out);
+    } else {
+      throw new BadFormatException("Bad format: " + format);
+    }
+  }
+
+  public static class BadFormatException extends Exception {
+
+    private static final long serialVersionUID = 1L;
+
+    public BadFormatException(String msg) {
+      super(msg);
+    }
+  }
+
+  private void processConfigTagRequest(HttpServletRequest request,
+      Writer out) throws IOException {
+    String cmd = request.getParameter(COMMAND);
+    Gson gson = new Gson();
+    Configuration config = getOzoneConfig();
+
+    switch (cmd) {
+    case "getOzoneTags":
+      out.write(gson.toJson(config.get("ozone.system.tags").split(",")));
+      break;
+    case "getPropertyByTag":
+      String tags = request.getParameter("tags");
+      Map<String, Properties> propMap = new HashMap<>();
+
+      for (String tag : tags.split(",")) {
+        if (config.isPropertyTag(tag)) {
+          Properties properties = config.getAllPropertiesByTag(tag);
+          propMap.put(tag, properties);
+        } else {
+          LOG.debug("Not a valid tag: " + tag);
+        }
+      }
+      out.write(gson.toJsonTree(propMap).toString());
+      break;
+    default:
+      throw new IllegalArgumentException(cmd + " is not a valid command.");
+    }
+
+  }
+
+  private static Configuration getOzoneConfig() {
+    return OZONE_CONFIG;
+  }
+}
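
A test-style sketch of the servlet's response writer above, assuming the sketch class lives in the org.apache.hadoop.hdds.conf package (writeResponse and the FORMAT_* constants are not public); the class and method names are hypothetical.

package org.apache.hadoop.hdds.conf;

import java.io.StringWriter;

final class HddsConfServletSketch {
  static String dumpConfAsJson() throws Exception {
    StringWriter out = new StringWriter();
    // A null property name dumps the whole configuration; "json" and "xml" are
    // the only accepted formats, anything else raises BadFormatException.
    HddsConfServlet.writeResponse(new OzoneConfiguration(), out, "json", null);
    return out.toString();
  }
}
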
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java
new file mode 100644
index 0000000..f07718c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.conf;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Configuration for ozone.
+ */
+@InterfaceAudience.Private
+public class OzoneConfiguration extends Configuration {
+  static {
+    activate();
+  }
+
+  public OzoneConfiguration() {
+    OzoneConfiguration.activate();
+  }
+
+  public OzoneConfiguration(Configuration conf) {
+    super(conf);
+  }
+
+  public List<Property> readPropertyFromXml(URL url) throws JAXBException {
+    JAXBContext context = JAXBContext.newInstance(XMLConfiguration.class);
+    Unmarshaller um = context.createUnmarshaller();
+
+    XMLConfiguration config = (XMLConfiguration) um.unmarshal(url);
+    return config.getProperties();
+  }
+
+  /**
+   * Class to marshal/unmarshal configuration from XML files.
+   */
+  @XmlAccessorType(XmlAccessType.FIELD)
+  @XmlRootElement(name = "configuration")
+  public static class XMLConfiguration {
+
+    @XmlElement(name = "property", type = Property.class)
+    private List<Property> properties = new ArrayList<>();
+
+    public XMLConfiguration() {
+    }
+
+    public XMLConfiguration(List<Property> properties) {
+      this.properties = properties;
+    }
+
+    public List<Property> getProperties() {
+      return properties;
+    }
+
+    public void setProperties(List<Property> properties) {
+      this.properties = properties;
+    }
+  }
+
+  /**
+   * Class to marshal/unmarshal configuration properties from XML files.
+   */
+  @XmlAccessorType(XmlAccessType.FIELD)
+  @XmlRootElement(name = "property")
+  public static class Property implements Comparable<Property> {
+
+    private String name;
+    private String value;
+    private String tag;
+    private String description;
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getValue() {
+      return value;
+    }
+
+    public void setValue(String value) {
+      this.value = value;
+    }
+
+    public String getTag() {
+      return tag;
+    }
+
+    public void setTag(String tag) {
+      this.tag = tag;
+    }
+
+    public String getDescription() {
+      return description;
+    }
+
+    public void setDescription(String description) {
+      this.description = description;
+    }
+
+    @Override
+    public int compareTo(Property o) {
+      if (this == o) {
+        return 0;
+      }
+      return this.getName().compareTo(o.getName());
+    }
+
+    @Override
+    public String toString() {
+      return this.getName() + " " + this.getValue() + " " + this.getTag();
+    }
+
+    @Override
+    public int hashCode(){
+      return this.getName().hashCode();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return (obj instanceof Property) && (((Property) obj).getName())
+          .equals(this.getName());
+    }
+  }
+
+  public static void activate(){
+    // adds the default resources
+    Configuration.addDefaultResource("hdfs-default.xml");
+    Configuration.addDefaultResource("hdfs-site.xml");
+    Configuration.addDefaultResource("ozone-default.xml");
+    Configuration.addDefaultResource("ozone-site.xml");
+  }
+}
\ No newline at end of file
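
A brief sketch of how the configuration class above is typically instantiated; the key lookup uses the ScmConfigKeys constant rather than a literal, and the wrapper class is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;

final class OzoneConfigurationSketch {
  static void loadOzoneConfig() {
    // Constructing an OzoneConfiguration registers ozone-default.xml and
    // ozone-site.xml (plus the HDFS resources) as default resources.
    OzoneConfiguration conf = new OzoneConfiguration();
    String scmNames = conf.get(ScmConfigKeys.OZONE_SCM_NAMES);

    // An existing Hadoop Configuration can also be wrapped via the copy constructor.
    Configuration base = new Configuration();
    OzoneConfiguration wrapped = new OzoneConfiguration(base);
  }
}
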
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java
new file mode 100644
index 0000000..948057e
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.conf;
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java
new file mode 100644
index 0000000..f8894e6
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds;
+
+/**
+ * Generic HDDS-specific configuration and helper classes.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
new file mode 100644
index 0000000..b2fa291
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
@@ -0,0 +1,353 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.protocol;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.util.UUID;
+
+/**
+ * DatanodeDetails contains details about a DataNode, such as:
+ * - UUID of the DataNode.
+ * - IP address and hostname.
+ * - Ports on which the DataNode is listening.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class DatanodeDetails implements Comparable<DatanodeDetails> {
+
+  /**
+   * DataNode's unique identifier in the cluster.
+   */
+  private final UUID uuid;
+
+  private String ipAddress;
+  private String hostName;
+  private Integer containerPort;
+  private Integer ratisPort;
+  private Integer ozoneRestPort;
+
+
+  /**
+   * Constructs DatanodeDetails instance. DatanodeDetails.Builder is used
+   * for instantiating DatanodeDetails.
+   * @param uuid DataNode's UUID
+   * @param ipAddress IP Address of this DataNode
+   * @param hostName DataNode's hostname
+   * @param containerPort Container Port
+   * @param ratisPort Ratis Port
+   * @param ozoneRestPort Rest Port
+   */
+  private DatanodeDetails(String uuid, String ipAddress, String hostName,
+      Integer containerPort, Integer ratisPort, Integer ozoneRestPort) {
+    this.uuid = UUID.fromString(uuid);
+    this.ipAddress = ipAddress;
+    this.hostName = hostName;
+    this.containerPort = containerPort;
+    this.ratisPort = ratisPort;
+    this.ozoneRestPort = ozoneRestPort;
+  }
+
+  /**
+   * Returns the DataNode UUID.
+   *
+   * @return UUID of DataNode
+   */
+  public UUID getUuid() {
+    return uuid;
+  }
+
+  /**
+   * Returns the string representation of DataNode UUID.
+   *
+   * @return UUID of DataNode
+   */
+  public String getUuidString() {
+    return uuid.toString();
+  }
+
+  /**
+   * Sets the IP address of Datanode.
+   *
+   * @param ip IP Address
+   */
+  public void setIpAddress(String ip) {
+    this.ipAddress = ip;
+  }
+
+  /**
+   * Returns IP address of DataNode.
+   *
+   * @return IP address
+   */
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  /**
+   * Sets the Datanode hostname.
+   *
+   * @param host hostname
+   */
+  public void setHostName(String host) {
+    this.hostName = host;
+  }
+
+  /**
+   * Returns Hostname of DataNode.
+   *
+   * @return Hostname
+   */
+  public String getHostName() {
+    return hostName;
+  }
+
+  /**
+   * Sets the Container Port.
+   * @param port ContainerPort
+   */
+  public void setContainerPort(int port) {
+    containerPort = port;
+  }
+
+  /**
+   * Returns standalone container Port.
+   *
+   * @return Container Port
+   */
+  public int getContainerPort() {
+    return containerPort;
+  }
+
+  /**
+   * Sets Ratis Port.
+   * @param port RatisPort
+   */
+  public void setRatisPort(int port) {
+    ratisPort = port;
+  }
+
+
+  /**
+   * Returns Ratis Port.
+   * @return Ratis Port
+   */
+  public int getRatisPort() {
+    return ratisPort;
+  }
+
+
+  /**
+   * Sets OzoneRestPort.
+   * @param port OzoneRestPort
+   */
+  public void setOzoneRestPort(int port) {
+    ozoneRestPort = port;
+  }
+
+  /**
+   * Returns Ozone Rest Port.
+   * @return OzoneRestPort
+   */
+  public int getOzoneRestPort() {
+    return ozoneRestPort;
+  }
+
+  /**
+   * Returns a DatanodeDetails from the protocol buffers.
+   *
+   * @param datanodeDetailsProto - protoBuf Message
+   * @return DatanodeDetails
+   */
+  public static DatanodeDetails getFromProtoBuf(
+      HddsProtos.DatanodeDetailsProto datanodeDetailsProto) {
+    DatanodeDetails.Builder builder = newBuilder();
+    builder.setUuid(datanodeDetailsProto.getUuid());
+    if (datanodeDetailsProto.hasIpAddress()) {
+      builder.setIpAddress(datanodeDetailsProto.getIpAddress());
+    }
+    if (datanodeDetailsProto.hasHostName()) {
+      builder.setHostName(datanodeDetailsProto.getHostName());
+    }
+    if (datanodeDetailsProto.hasContainerPort()) {
+      builder.setContainerPort(datanodeDetailsProto.getContainerPort());
+    }
+    if (datanodeDetailsProto.hasRatisPort()) {
+      builder.setRatisPort(datanodeDetailsProto.getRatisPort());
+    }
+    if (datanodeDetailsProto.hasOzoneRestPort()) {
+      builder.setOzoneRestPort(datanodeDetailsProto.getOzoneRestPort());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Returns the protobuf message representation of this DatanodeDetails.
+   * @return HddsProtos.DatanodeDetailsProto
+   */
+  public HddsProtos.DatanodeDetailsProto getProtoBufMessage() {
+    HddsProtos.DatanodeDetailsProto.Builder builder =
+        HddsProtos.DatanodeDetailsProto.newBuilder()
+            .setUuid(getUuidString());
+    if (ipAddress != null) {
+      builder.setIpAddress(ipAddress);
+    }
+    if (hostName != null) {
+      builder.setHostName(hostName);
+    }
+    if (containerPort != null) {
+      builder.setContainerPort(containerPort);
+    }
+    if (ratisPort != null) {
+      builder.setRatisPort(ratisPort);
+    }
+    if (ozoneRestPort != null) {
+      builder.setOzoneRestPort(ozoneRestPort);
+    }
+    return builder.build();
+  }
+
+  @Override
+  public String toString() {
+    return uuid.toString() + "{" +
+        "ip: " +
+        ipAddress +
+        ", host: " +
+        hostName +
+        "}";
+  }
+
+  @Override
+  public int compareTo(DatanodeDetails that) {
+    return this.getUuid().compareTo(that.getUuid());
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof DatanodeDetails &&
+        uuid.equals(((DatanodeDetails) obj).uuid);
+  }
+
+  @Override
+  public int hashCode() {
+    return uuid.hashCode();
+  }
+
+  /**
+   * Returns DatanodeDetails.Builder instance.
+   *
+   * @return DatanodeDetails.Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder class for building DatanodeDetails.
+   */
+  public static class Builder {
+    private String id;
+    private String ipAddress;
+    private String hostName;
+    private Integer containerPort;
+    private Integer ratisPort;
+    private Integer ozoneRestPort;
+
+    /**
+     * Sets the DatanodeUuid.
+     *
+     * @param uuid DatanodeUuid
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setUuid(String uuid) {
+      this.id = uuid;
+      return this;
+    }
+
+    /**
+     * Sets the IP address of DataNode.
+     *
+     * @param ip address
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setIpAddress(String ip) {
+      this.ipAddress = ip;
+      return this;
+    }
+
+    /**
+     * Sets the hostname of DataNode.
+     *
+     * @param host hostname
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setHostName(String host) {
+      this.hostName = host;
+      return this;
+    }
+    /**
+     * Sets the ContainerPort.
+     *
+     * @param port ContainerPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setContainerPort(Integer port) {
+      this.containerPort = port;
+      return this;
+    }
+
+    /**
+     * Sets the RatisPort.
+     *
+     * @param port RatisPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setRatisPort(Integer port) {
+      this.ratisPort = port;
+      return this;
+    }
+
+    /**
+     * Sets the OzoneRestPort.
+     *
+     * @param port OzoneRestPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setOzoneRestPort(Integer port) {
+      this.ozoneRestPort = port;
+      return this;
+    }
+
+    /**
+     * Builds and returns DatanodeDetails instance.
+     *
+     * @return DatanodeDetails
+     */
+    public DatanodeDetails build() {
+      Preconditions.checkNotNull(id);
+      return new DatanodeDetails(id, ipAddress, hostName, containerPort,
+          ratisPort, ozoneRestPort);
+    }
+
+  }
+
+}
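
A self-contained sketch of building a DatanodeDetails and round-tripping it through its protobuf form; the address, hostname, and port numbers are example values only and the wrapper class is hypothetical.

import java.util.UUID;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

final class DatanodeDetailsSketch {
  static void roundTrip() {
    DatanodeDetails dn = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())
        .setIpAddress("10.0.0.15")          // example values only
        .setHostName("dn1.example.com")
        .setContainerPort(9859)
        .setRatisPort(9858)
        .setOzoneRestPort(9880)
        .build();

    HddsProtos.DatanodeDetailsProto proto = dn.getProtoBufMessage();
    DatanodeDetails restored = DatanodeDetails.getFromProtoBuf(proto);
    // Equality and ordering are based solely on the UUID.
    assert dn.equals(restored);
  }
}
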
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java
new file mode 100644
index 0000000..7dae0fc
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains HDDS protocol-related classes.
+ */
+package org.apache.hadoop.hdds.protocol;
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
new file mode 100644
index 0000000..7f40ab2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
@@ -0,0 +1,271 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * This class contains constants for configuration keys used in SCM.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public final class ScmConfigKeys {
+
+  public static final String SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY =
+      "scm.container.client.idle.threshold";
+  public static final String SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT =
+      "10s";
+
+  public static final String SCM_CONTAINER_CLIENT_MAX_SIZE_KEY =
+      "scm.container.client.max.size";
+  public static final int SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT =
+      256;
+
+  public static final String SCM_CONTAINER_CLIENT_MAX_OUTSTANDING_REQUESTS =
+      "scm.container.client.max.outstanding.requests";
+  public static final int SCM_CONTAINER_CLIENT_MAX_OUTSTANDING_REQUESTS_DEFAULT
+      = 100;
+
+  public static final String DFS_CONTAINER_RATIS_ENABLED_KEY
+      = "dfs.container.ratis.enabled";
+  public static final boolean DFS_CONTAINER_RATIS_ENABLED_DEFAULT
+      = false;
+  public static final String DFS_CONTAINER_RATIS_RPC_TYPE_KEY
+      = "dfs.container.ratis.rpc.type";
+  public static final String DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT
+      = "GRPC";
+  public static final String DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_KEY
+      = "dfs.container.ratis.num.write.chunk.threads";
+  public static final int DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_DEFAULT
+      = 60;
+  public static final String DFS_CONTAINER_RATIS_SEGMENT_SIZE_KEY =
+      "dfs.container.ratis.segment.size";
+  public static final int DFS_CONTAINER_RATIS_SEGMENT_SIZE_DEFAULT =
+      1 * 1024 * 1024 * 1024;
+  public static final String DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_KEY =
+      "dfs.container.ratis.segment.preallocated.size";
+  public static final int
+      DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_DEFAULT = 128 * 1024 * 1024;
+
+  // TODO : this is copied from OzoneConsts, may need to move to a better place
+  public static final String OZONE_SCM_CHUNK_SIZE_KEY = "ozone.scm.chunk.size";
+  // 16 MB by default
+  public static final int OZONE_SCM_CHUNK_SIZE_DEFAULT = 16 * 1024 * 1024;
+  public static final int OZONE_SCM_CHUNK_MAX_SIZE = 32 * 1024 * 1024;
+
+  public static final String OZONE_SCM_CLIENT_PORT_KEY =
+      "ozone.scm.client.port";
+  public static final int OZONE_SCM_CLIENT_PORT_DEFAULT = 9860;
+
+  public static final String OZONE_SCM_DATANODE_PORT_KEY =
+      "ozone.scm.datanode.port";
+  public static final int OZONE_SCM_DATANODE_PORT_DEFAULT = 9861;
+
+  // OZONE_KSM_PORT_DEFAULT = 9862
+  public static final String OZONE_SCM_BLOCK_CLIENT_PORT_KEY =
+      "ozone.scm.block.client.port";
+  public static final int OZONE_SCM_BLOCK_CLIENT_PORT_DEFAULT = 9863;
+
+  // Container service client
+  public static final String OZONE_SCM_CLIENT_ADDRESS_KEY =
+      "ozone.scm.client.address";
+  public static final String OZONE_SCM_CLIENT_BIND_HOST_KEY =
+      "ozone.scm.client.bind.host";
+  public static final String OZONE_SCM_CLIENT_BIND_HOST_DEFAULT =
+      "0.0.0.0";
+
+  // Block service client
+  public static final String OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY =
+      "ozone.scm.block.client.address";
+  public static final String OZONE_SCM_BLOCK_CLIENT_BIND_HOST_KEY =
+      "ozone.scm.block.client.bind.host";
+  public static final String OZONE_SCM_BLOCK_CLIENT_BIND_HOST_DEFAULT =
+      "0.0.0.0";
+
+  public static final String OZONE_SCM_DATANODE_ADDRESS_KEY =
+      "ozone.scm.datanode.address";
+  public static final String OZONE_SCM_DATANODE_BIND_HOST_KEY =
+      "ozone.scm.datanode.bind.host";
+  public static final String OZONE_SCM_DATANODE_BIND_HOST_DEFAULT =
+      "0.0.0.0";
+
+  public static final String OZONE_SCM_HTTP_ENABLED_KEY =
+      "ozone.scm.http.enabled";
+  public static final String OZONE_SCM_HTTP_BIND_HOST_KEY =
+      "ozone.scm.http-bind-host";
+  public static final String OZONE_SCM_HTTPS_BIND_HOST_KEY =
+      "ozone.scm.https-bind-host";
+  public static final String OZONE_SCM_HTTP_ADDRESS_KEY =
+      "ozone.scm.http-address";
+  public static final String OZONE_SCM_HTTPS_ADDRESS_KEY =
+      "ozone.scm.https-address";
+  public static final String OZONE_SCM_KEYTAB_FILE =
+      "ozone.scm.keytab.file";
+  public static final String OZONE_SCM_HTTP_BIND_HOST_DEFAULT = "0.0.0.0";
+  public static final int OZONE_SCM_HTTP_BIND_PORT_DEFAULT = 9876;
+  public static final int OZONE_SCM_HTTPS_BIND_PORT_DEFAULT = 9877;
+
+  public static final String HDDS_REST_HTTP_ADDRESS_KEY =
+      "hdds.rest.http-address";
+  public static final String HDDS_REST_HTTP_ADDRESS_DEFAULT = "0.0.0.0:9880";
+  public static final String HDDS_REST_CSRF_ENABLED_KEY =
+      "hdds.rest.rest-csrf.enabled";
+  public static final boolean HDDS_REST_CSRF_ENABLED_DEFAULT = false;
+  public static final String HDDS_REST_NETTY_HIGH_WATERMARK =
+      "hdds.rest.netty.high.watermark";
+  public static final int HDDS_REST_NETTY_HIGH_WATERMARK_DEFAULT = 65536;
+  public static final int HDDS_REST_NETTY_LOW_WATERMARK_DEFAULT = 32768;
+  public static final String HDDS_REST_NETTY_LOW_WATERMARK =
+      "hdds.rest.netty.low.watermark";
+
+  public static final String OZONE_SCM_HANDLER_COUNT_KEY =
+      "ozone.scm.handler.count.key";
+  public static final int OZONE_SCM_HANDLER_COUNT_DEFAULT = 10;
+
+  public static final String OZONE_SCM_HEARTBEAT_INTERVAL =
+      "ozone.scm.heartbeat.interval";
+  public static final String OZONE_SCM_HEARBEAT_INTERVAL_DEFAULT =
+      "30s";
+
+  public static final String OZONE_SCM_DEADNODE_INTERVAL =
+      "ozone.scm.dead.node.interval";
+  public static final String OZONE_SCM_DEADNODE_INTERVAL_DEFAULT =
+      "10m";
+
+  public static final String OZONE_SCM_MAX_HB_COUNT_TO_PROCESS =
+      "ozone.scm.max.hb.count.to.process";
+  public static final int OZONE_SCM_MAX_HB_COUNT_TO_PROCESS_DEFAULT = 5000;
+
+  public static final String OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL =
+      "ozone.scm.heartbeat.thread.interval";
+  public static final String OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL_DEFAULT =
+      "3s";
+
+  public static final String OZONE_SCM_STALENODE_INTERVAL =
+      "ozone.scm.stale.node.interval";
+  public static final String OZONE_SCM_STALENODE_INTERVAL_DEFAULT =
+      "90s";
+
+  public static final String OZONE_SCM_HEARTBEAT_RPC_TIMEOUT =
+      "ozone.scm.heartbeat.rpc-timeout";
+  public static final long OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT =
+      1000;
+
+  /**
+   * Defines how frequently we log a missed heartbeat to a specific SCM. By
+   * default a warning message is written after every 10 consecutive
+   * heartbeats missed to a specific SCM, to avoid flooding the log with
+   * missed-heartbeat statements.
+   */
+  public static final String OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT =
+      "ozone.scm.heartbeat.log.warn.interval.count";
+  public static final int OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT =
+      10;
+
+  // The ozone.scm.names key is a set of DNS | DNS:PORT | IP Address | IP:PORT
+  // entries, written as a comma-separated string, e.g. scm1, scm2:8020,
+  // 7.7.7.7:7777.
+  //
+  // If this key is not specified, datanodes will not be able to find the
+  // SCM. The SCM membership can be dynamic, so this key should contain
+  // all possible SCM names. Once the SCM leader is discovered, datanodes will
+  // get the right list of SCMs to heartbeat to from the leader.
+  // While it is good for the datanodes to know the names of all SCM nodes,
+  // it is sufficient to know the name of one working SCM. That SCM
+  // will be able to return information about the other SCMs that are part of
+  // the SCM replicated log.
+  //
+  // In case of a membership change, any one of the SCM machines will be
+  // able to send back a new list to the datanodes.
+  public static final String OZONE_SCM_NAMES = "ozone.scm.names";
+
+  public static final int OZONE_SCM_DEFAULT_PORT =
+      OZONE_SCM_DATANODE_PORT_DEFAULT;
+  // File name and path that the datanode ID is written to.
+  // If this value is not set, container startup will fail.
+  public static final String OZONE_SCM_DATANODE_ID = "ozone.scm.datanode.id";
+
+  public static final String OZONE_SCM_DATANODE_ID_PATH_DEFAULT = "datanode.id";
+
+  public static final String OZONE_SCM_DB_CACHE_SIZE_MB =
+      "ozone.scm.db.cache.size.mb";
+  public static final int OZONE_SCM_DB_CACHE_SIZE_DEFAULT = 128;
+
+  public static final String OZONE_SCM_CONTAINER_SIZE_GB =
+      "ozone.scm.container.size.gb";
+  public static final int OZONE_SCM_CONTAINER_SIZE_DEFAULT = 5;
+
+  public static final String OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY =
+      "ozone.scm.container.placement.impl";
+
+  public static final String OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE =
+      "ozone.scm.container.provision_batch_size";
+  public static final int OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE_DEFAULT = 20;
+
+  public static final String OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY =
+      "ozone.scm.container.deletion-choosing.policy";
+
+  public static final String OZONE_SCM_CONTAINER_CREATION_LEASE_TIMEOUT =
+      "ozone.scm.container.creation.lease.timeout";
+
+  public static final String
+      OZONE_SCM_CONTAINER_CREATION_LEASE_TIMEOUT_DEFAULT = "60s";
+
+  /**
+   * Do not start processing a pool until at least this many seconds have
+   * elapsed since that pool was last processed.
+   */
+  public static final String OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL =
+      "ozone.scm.container.report.processing.interval";
+  public static final String
+      OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL_DEFAULT = "60s";
+
+  /**
+   * This determines the total number of pools to be processed in parallel.
+   */
+  public static final String OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS =
+      "ozone.scm.max.nodepool.processing.threads";
+  public static final int OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS_DEFAULT = 1;
+  /**
+   * These two settings control the number of threads in the executor pool
+   * and the timeout for container reports from all nodes.
+   */
+  public static final String OZONE_SCM_MAX_CONTAINER_REPORT_THREADS =
+      "ozone.scm.max.container.report.threads";
+  public static final int OZONE_SCM_MAX_CONTAINER_REPORT_THREADS_DEFAULT = 100;
+  public static final String OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT =
+      "ozone.scm.container.reports.wait.timeout";
+  public static final String OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT_DEFAULT =
+      "5m";
+
+  public static final String OZONE_SCM_BLOCK_DELETION_MAX_RETRY =
+      "ozone.scm.block.deletion.max.retry";
+  public static final int OZONE_SCM_BLOCK_DELETION_MAX_RETRY_DEFAULT = 4096;
+
+  // Once a container usage crosses this threshold, it is eligible for
+  // closing.
+  public static final String OZONE_SCM_CONTAINER_CLOSE_THRESHOLD =
+      "ozone.scm.container.close.threshold";
+  public static final float OZONE_SCM_CONTAINER_CLOSE_THRESHOLD_DEFAULT = 0.9f;
+  /**
+   * Never constructed.
+   */
+  private ScmConfigKeys() {
+
+  }
+}
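
These constants are intended to be read through the standard Hadoop Configuration API. A minimal sketch follows; the demo class name is invented, a plain Configuration is used instead of any Ozone-specific subclass, and the 90-second literal simply mirrors OZONE_SCM_STALENODE_INTERVAL_DEFAULT ("90s").

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;

import java.util.concurrent.TimeUnit;

public final class ScmConfigDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    int handlerCount = conf.getInt(
        ScmConfigKeys.OZONE_SCM_HANDLER_COUNT_KEY,
        ScmConfigKeys.OZONE_SCM_HANDLER_COUNT_DEFAULT);
    boolean ratisEnabled = conf.getBoolean(
        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT);
    // 90 seconds mirrors OZONE_SCM_STALENODE_INTERVAL_DEFAULT.
    long staleNodeSeconds = conf.getTimeDuration(
        ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL, 90, TimeUnit.SECONDS);
    System.out.println(handlerCount + " " + ratisEnabled + " "
        + staleNodeSeconds);
  }

  private ScmConfigDemo() { }
}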
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java
new file mode 100644
index 0000000..6236feb
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+/**
+ * ScmInfo wraps the result returned from SCM#getScmInfo which
+ * contains clusterId and the SCM Id.
+ */
+public final class ScmInfo {
+  private String clusterId;
+  private String scmId;
+
+  /**
+   * Builder for ScmInfo.
+   */
+  public static class Builder {
+    private String clusterId;
+    private String scmId;
+
+    /**
+     * sets the cluster id.
+     * @param cid clusterId to be set
+     * @return Builder for ScmInfo
+     */
+    public Builder setClusterId(String cid) {
+      this.clusterId = cid;
+      return this;
+    }
+
+    /**
+     * sets the scmId.
+     * @param id scmId
+     * @return Builder for scmInfo
+     */
+    public Builder setScmId(String id) {
+      this.scmId = id;
+      return this;
+    }
+
+    public ScmInfo build() {
+      return new ScmInfo(clusterId, scmId);
+    }
+  }
+
+  private ScmInfo(String clusterId, String scmId) {
+    this.clusterId = clusterId;
+    this.scmId = scmId;
+  }
+
+  /**
+   * Gets the clusterId from the Version file.
+   * @return ClusterId
+   */
+  public String getClusterId() {
+    return clusterId;
+  }
+
+  /**
+   * Gets the SCM Id from the Version file.
+   * @return SCM Id
+   */
+  public String getScmId() {
+    return scmId;
+  }
+}
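
A short sketch of the ScmInfo Builder round trip; the demo class name and the cluster/SCM ID strings are placeholders, not real identifiers.

import org.apache.hadoop.hdds.scm.ScmInfo;

public final class ScmInfoDemo {
  public static void main(String[] args) {
    // Placeholder values; real IDs come from the SCM version file.
    ScmInfo info = new ScmInfo.Builder()
        .setClusterId("CID-example-cluster")
        .setScmId("SCM-example-id")
        .build();
    System.out.println(info.getClusterId() + " / " + info.getScmId());
  }

  private ScmInfoDemo() { }
}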
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
new file mode 100644
index 0000000..c96f79b
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A client for the storage container protocol.
+ */
+public abstract class XceiverClientSpi implements Closeable {
+
+  private final AtomicInteger referenceCount;
+  private boolean isEvicted;
+
+  XceiverClientSpi() {
+    this.referenceCount = new AtomicInteger(0);
+    this.isEvicted = false;
+  }
+
+  void incrementReference() {
+    this.referenceCount.incrementAndGet();
+  }
+
+  void decrementReference() {
+    this.referenceCount.decrementAndGet();
+    cleanup();
+  }
+
+  void setEvicted() {
+    isEvicted = true;
+    cleanup();
+  }
+
+  // Close the client only if:
+  // 1) there are no outstanding references to it, and
+  // 2) it has been evicted from the cache.
+  private void cleanup() {
+    if (referenceCount.get() == 0 && isEvicted) {
+      close();
+    }
+  }
+
+  @VisibleForTesting
+  public int getRefcount() {
+    return referenceCount.get();
+  }
+
+  /**
+   * Connects to the leader in the pipeline.
+   */
+  public abstract void connect() throws Exception;
+
+  @Override
+  public abstract void close();
+
+  /**
+   * Returns the pipeline of machines that host the container used by this
+   * client.
+   *
+   * @return pipeline of machines that host the container
+   */
+  public abstract Pipeline getPipeline();
+
+  /**
+   * Sends a given command to the server and gets the reply back.
+   * @param request Request
+   * @return Response to the command
+   * @throws IOException
+   */
+  public abstract ContainerCommandResponseProto sendCommand(
+      ContainerCommandRequestProto request) throws IOException;
+
+  /**
+   * Sends a given command to the server and gets a waitable future back.
+   *
+   * @param request Request
+   * @return Response to the command
+   * @throws IOException
+   */
+  public abstract CompletableFuture<ContainerCommandResponseProto>
+      sendCommandAsync(ContainerCommandRequestProto request)
+      throws IOException, ExecutionException, InterruptedException;
+
+  /**
+   * Create a pipeline.
+   *
+   * @param pipelineID - Name of the pipeline.
+   * @param datanodes - Datanodes
+   */
+  public abstract void createPipeline(String pipelineID,
+      List<DatanodeDetails> datanodes) throws IOException;
+
+  /**
+   * Returns pipeline Type.
+   *
+   * @return - {Stand_Alone, Ratis or Chained}
+   */
+  public abstract HddsProtos.ReplicationType getPipelineType();
+}
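
The reference-count/eviction protocol is the subtle part of this class: close() only runs once the client is both evicted and unreferenced. The stub below illustrates that lifecycle under stated assumptions; it lives in the same package because the counting methods are package-private, and it is only a sketch, not the real gRPC/Ratis implementation.

package org.apache.hadoop.hdds.scm;

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

import java.util.List;
import java.util.concurrent.CompletableFuture;

/** Stub used only to illustrate the refcount/eviction lifecycle. */
class StubXceiverClient extends XceiverClientSpi {
  @Override public void connect() { }
  @Override public void close() { System.out.println("closed"); }
  @Override public Pipeline getPipeline() { return null; }
  @Override public ContainerCommandResponseProto sendCommand(
      ContainerCommandRequestProto request) { return null; }
  @Override public CompletableFuture<ContainerCommandResponseProto>
      sendCommandAsync(ContainerCommandRequestProto request) { return null; }
  @Override public void createPipeline(String pipelineID,
      List<DatanodeDetails> datanodes) { }
  @Override public HddsProtos.ReplicationType getPipelineType() { return null; }

  public static void main(String[] args) {
    StubXceiverClient client = new StubXceiverClient();
    client.incrementReference();  // a caller acquires the client
    client.setEvicted();          // evicted from the cache, refcount still 1
    client.decrementReference();  // last reference dropped -> close() runs here
  }
}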
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
new file mode 100644
index 0000000..0d4a299
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.client;
+
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * The interface used to call into the underlying container layer.
+ *
+ * Written as an interface to allow easy testing: implement a mock container
+ * layer for standalone testing of the CBlock API without actually calling
+ * into remote containers. The actual container layer can simply implement
+ * this interface.
+ *
+ * NOTE: this class is only temporarily needed. When SCM containers are
+ * full-fledged, this interface will likely be removed.
+ */
+@InterfaceStability.Unstable
+public interface ScmClient {
+  /**
+   * Creates a Container on SCM and returns the pipeline.
+   * @param containerId - String container ID
+   * @return Pipeline
+   * @throws IOException
+   */
+  Pipeline createContainer(String containerId, String owner) throws IOException;
+
+  /**
+   * Gets a container by name. Throws if the container does not exist.
+   * @param containerId - String Container ID
+   * @return Pipeline
+   * @throws IOException
+   */
+  Pipeline getContainer(String containerId) throws IOException;
+
+  /**
+   * Close a container by name.
+   *
+   * @param pipeline the container to be closed.
+   * @throws IOException
+   */
+  void closeContainer(Pipeline pipeline) throws IOException;
+
+  /**
+   * Deletes an existing container.
+   * @param pipeline - Pipeline that represents the container.
+   * @param force - true to forcibly delete the container.
+   * @throws IOException
+   */
+  void deleteContainer(Pipeline pipeline, boolean force) throws IOException;
+
+  /**
+   * Lists a range of containers and get their info.
+   *
+   * @param startName start name; if null, start searching at the head.
+   * @param prefixName prefix name; if null, the filter is disabled.
+   * @param count count; if count < 0, the result size is unlimited.
+   *              (Usually the count is replaced with a very big value
+   *              instead of being unlimited, in case the db is very big.)
+   *
+   * @return a list of pipeline.
+   * @throws IOException
+   */
+  List<ContainerInfo> listContainer(String startName, String prefixName,
+      int count) throws IOException;
+
+  /**
+   * Read meta data from an existing container.
+   * @param pipeline - Pipeline that represents the container.
+   * @return ContainerInfo
+   * @throws IOException
+   */
+  ContainerData readContainer(Pipeline pipeline) throws IOException;
+
+
+  /**
+   * Gets the container size -- Computed by SCM from Container Reports.
+   * @param pipeline - Pipeline
+   * @return number of bytes used by this container.
+   * @throws IOException
+   */
+  long getContainerSize(Pipeline pipeline) throws IOException;
+
+  /**
+   * Creates a Container on SCM and returns the pipeline.
+   * @param type - Replication Type.
+   * @param replicationFactor - Replication Factor
+   * @param containerId - Container ID
+   * @return Pipeline
+   * @throws IOException - in case of error.
+   */
+  Pipeline createContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor replicationFactor, String containerId,
+      String owner) throws IOException;
+
+  /**
+   * Returns a set of Nodes that meet a query criteria.
+   * @param nodeStatuses - A set of criteria that we want the node to have.
+   * @param queryScope - Query scope - Cluster or pool.
+   * @param poolName - if it is pool, a pool name is required.
+   * @return A set of nodes that meet the requested criteria.
+   * @throws IOException
+   */
+  HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState> nodeStatuses,
+      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
+
+  /**
+   * Creates a specified replication pipeline.
+   * @param type - Type
+   * @param factor - Replication factor
+   * @param nodePool - Set of machines.
+   * @throws IOException
+   */
+  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
+      throws IOException;
+}
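
A sketch of how a caller might drive this interface; the demo class name is invented, the client argument is any ScmClient implementation (for example an RPC-backed one, which is not shown here), and the container name and owner are illustrative.

import org.apache.hadoop.hdds.scm.client.ScmClient;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

import java.io.IOException;

public final class ScmClientDemo {
  // Walks a container through its basic lifecycle via the interface.
  static void lifeCycle(ScmClient client) throws IOException {
    Pipeline pipeline = client.createContainer("demo-container", "ozone");
    long usedBytes = client.getContainerSize(pipeline);  // from container reports
    System.out.println("used bytes: " + usedBytes);
    client.closeContainer(pipeline);
    client.deleteContainer(pipeline, /* force = */ false);
  }

  private ScmClientDemo() { }
}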
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
new file mode 100644
index 0000000..e2f7033
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.client;
+
+/**
+ * This package contains classes for the client of the storage container
+ * protocol.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
new file mode 100644
index 0000000..9520c8c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.math3.util.MathUtils;
+
+/**
+ * Container ID is an integer value between 1 and MAX_CONTAINER_ID.
+ * <p>
+ * A dedicated type is used so that container IDs are not mixed up with
+ * ordinary integers in code.
+ */
+public class ContainerID implements Comparable {
+
+  private final long id;
+
+  /**
+   * Constructs ContainerID.
+   *
+   * @param id long
+   */
+  public ContainerID(long id) {
+    Preconditions.checkState(id > 0,
+        "Container ID should be a positive int");
+    this.id = id;
+  }
+
+  /**
+   * Factory method for creation of ContainerID.
+   * @param containerID  long
+   * @return ContainerID.
+   */
+  public static ContainerID valueof(long containerID) {
+    Preconditions.checkState(containerID > 0);
+    return new ContainerID(containerID);
+  }
+
+  /**
+   * Returns the long representation of the ID.
+   *
+   * @return long
+   */
+  public long getId() {
+    return id;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    ContainerID that = (ContainerID) o;
+
+    return id == that.id;
+  }
+
+  @Override
+  public int hashCode() {
+    return MathUtils.hash(id);
+  }
+
+  @Override
+  public int compareTo(Object o) {
+    Preconditions.checkNotNull(o);
+    if (o instanceof ContainerID) {
+      return Long.compare(this.getId(), ((ContainerID) o).getId());
+    }
+    throw new IllegalArgumentException(
+        "Object o should be an instance of ContainerID");
+  }
+
+  @Override
+  public String toString() {
+    return "id=" + id;
+  }
+}
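
A small usage sketch showing the value semantics and the positive-ID precondition; the demo class name is invented.

import org.apache.hadoop.hdds.scm.container.ContainerID;

public final class ContainerIDDemo {
  public static void main(String[] args) {
    ContainerID a = ContainerID.valueof(7L);
    ContainerID b = new ContainerID(7L);
    System.out.println(a.equals(b));  // true: equality is based only on the id
    System.out.println(a.getId());    // 7
    // ContainerID.valueof(0L) or new ContainerID(-1L) would fail the
    // Preconditions check, since container IDs must be positive.
  }

  private ContainerIDDemo() { }
}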
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
new file mode 100644
index 0000000..d253b15
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+/**
+ * Allocated block wraps the result returned from SCM#allocateBlock which
+ * contains a Pipeline and the key.
+ */
+public final class AllocatedBlock {
+  private Pipeline pipeline;
+  private String key;
+  // Indicates whether the client should create container before writing block.
+  private boolean shouldCreateContainer;
+
+  /**
+   * Builder for AllocatedBlock.
+   */
+  public static class Builder {
+    private Pipeline pipeline;
+    private String key;
+    private boolean shouldCreateContainer;
+
+    public Builder setPipeline(Pipeline p) {
+      this.pipeline = p;
+      return this;
+    }
+
+    public Builder setKey(String k) {
+      this.key = k;
+      return this;
+    }
+
+    public Builder setShouldCreateContainer(boolean shouldCreate) {
+      this.shouldCreateContainer = shouldCreate;
+      return this;
+    }
+
+    public AllocatedBlock build() {
+      return new AllocatedBlock(pipeline, key, shouldCreateContainer);
+    }
+  }
+
+  private AllocatedBlock(Pipeline pipeline, String key,
+      boolean shouldCreateContainer) {
+    this.pipeline = pipeline;
+    this.key = key;
+    this.shouldCreateContainer = shouldCreateContainer;
+  }
+
+  public Pipeline getPipeline() {
+    return pipeline;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  public boolean getCreateContainer() {
+    return shouldCreateContainer;
+  }
+}
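
Builder usage sketch for AllocatedBlock; the Pipeline argument would normally come from SCM's block allocation path, and the key name and demo class name are illustrative.

import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

public final class AllocatedBlockDemo {
  // Wraps an allocation result; 'pipeline' is supplied by the caller.
  static AllocatedBlock wrap(Pipeline pipeline) {
    AllocatedBlock block = new AllocatedBlock.Builder()
        .setPipeline(pipeline)
        .setKey("block-0001")
        .setShouldCreateContainer(true)
        .build();
    if (block.getCreateContainer()) {
      // The client is expected to create the container before writing.
      System.out.println("create container first for " + block.getKey());
    }
    return block;
  }

  private AllocatedBlockDemo() { }
}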
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
new file mode 100644
index 0000000..823a7fb
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
@@ -0,0 +1,333 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.util.Time;
+
+import java.util.Comparator;
+
+/**
+ * Class wraps ozone container info.
+ */
+public class ContainerInfo
+    implements Comparator<ContainerInfo>, Comparable<ContainerInfo> {
+  private HddsProtos.LifeCycleState state;
+  private Pipeline pipeline;
+  // Bytes allocated by SCM for clients.
+  private long allocatedBytes;
+  // Actual container usage, updated through heartbeat.
+  private long usedBytes;
+  private long numberOfKeys;
+  private long lastUsed;
+  // The wall-clock time, in ms since the epoch, at which the container
+  // entered its current state.
+  private long stateEnterTime;
+  private String owner;
+  private String containerName;
+  private long containerID;
+  ContainerInfo(
+      long containerID,
+      final String containerName,
+      HddsProtos.LifeCycleState state,
+      Pipeline pipeline,
+      long allocatedBytes,
+      long usedBytes,
+      long numberOfKeys,
+      long stateEnterTime,
+      String owner) {
+    this.containerID = containerID;
+    this.containerName = containerName;
+    this.pipeline = pipeline;
+    this.allocatedBytes = allocatedBytes;
+    this.usedBytes = usedBytes;
+    this.numberOfKeys = numberOfKeys;
+    this.lastUsed = Time.monotonicNow();
+    this.state = state;
+    this.stateEnterTime = stateEnterTime;
+    this.owner = owner;
+  }
+
+  /**
+   * Needed for serialization findbugs.
+   */
+  public ContainerInfo() {
+  }
+
+  public static ContainerInfo fromProtobuf(HddsProtos.SCMContainerInfo info) {
+    ContainerInfo.Builder builder = new ContainerInfo.Builder();
+    builder.setPipeline(Pipeline.getFromProtoBuf(info.getPipeline()));
+    builder.setAllocatedBytes(info.getAllocatedBytes());
+    builder.setUsedBytes(info.getUsedBytes());
+    builder.setNumberOfKeys(info.getNumberOfKeys());
+    builder.setState(info.getState());
+    builder.setStateEnterTime(info.getStateEnterTime());
+    builder.setOwner(info.getOwner());
+    builder.setContainerName(info.getContainerName());
+    builder.setContainerID(info.getContainerID());
+    return builder.build();
+  }
+
+  public long getContainerID() {
+    return containerID;
+  }
+
+  public String getContainerName() {
+    return containerName;
+  }
+
+  public HddsProtos.LifeCycleState getState() {
+    return state;
+  }
+
+  public void setState(HddsProtos.LifeCycleState state) {
+    this.state = state;
+  }
+
+  public long getStateEnterTime() {
+    return stateEnterTime;
+  }
+
+  public Pipeline getPipeline() {
+    return pipeline;
+  }
+
+  public long getAllocatedBytes() {
+    return allocatedBytes;
+  }
+
+  /**
+   * Updates the allocated bytes.
+   *
+   * @param size - newly allocated bytes; a negative size can be used in the
+   * case of deletes.
+   */
+  public void updateAllocatedBytes(long size) {
+    this.allocatedBytes += size;
+  }
+
+  public long getUsedBytes() {
+    return usedBytes;
+  }
+
+  public long getNumberOfKeys() {
+    return numberOfKeys;
+  }
+
+  public ContainerID containerID() {
+    return new ContainerID(getContainerID());
+  }
+
+  /**
+   * Gets the last used time from SCM's perspective.
+   *
+   * @return time in milliseconds.
+   */
+  public long getLastUsed() {
+    return lastUsed;
+  }
+
+  public void updateLastUsedTime() {
+    lastUsed = Time.monotonicNow();
+  }
+
+  public void allocate(long size) {
+    // should we also have total container size in ContainerInfo
+    // and check before allocating?
+    allocatedBytes += size;
+  }
+
+  public HddsProtos.SCMContainerInfo getProtobuf() {
+    HddsProtos.SCMContainerInfo.Builder builder =
+        HddsProtos.SCMContainerInfo.newBuilder();
+    builder.setPipeline(getPipeline().getProtobufMessage());
+    builder.setAllocatedBytes(getAllocatedBytes());
+    builder.setUsedBytes(getUsedBytes());
+    builder.setNumberOfKeys(getNumberOfKeys());
+    builder.setState(state);
+    builder.setStateEnterTime(stateEnterTime);
+    builder.setContainerID(getContainerID());
+
+    if (getOwner() != null) {
+      builder.setOwner(getOwner());
+    }
+    builder.setContainerName(getContainerName());
+    return builder.build();
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  @Override
+  public String toString() {
+    return "ContainerInfo{"
+        + "state=" + state
+        + ", pipeline=" + pipeline
+        + ", stateEnterTime=" + stateEnterTime
+        + ", owner=" + owner
+        + ", containerName='" + containerName
+        + '}';
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    ContainerInfo that = (ContainerInfo) o;
+
+    return new EqualsBuilder()
+        .append(pipeline.getContainerName(), that.pipeline.getContainerName())
+
+        // TODO : Fix this later. If we add these factors some tests fail.
+        // So Commenting this to continue and will enforce this with
+        // Changes in pipeline where we remove Container Name to
+        // SCMContainerinfo from Pipeline.
+        // .append(pipeline.getFactor(), that.pipeline.getFactor())
+        // .append(pipeline.getType(), that.pipeline.getType())
+        .append(owner, that.owner)
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(11, 811)
+        .append(pipeline.getContainerName())
+        .append(pipeline.getFactor())
+        .append(pipeline.getType())
+        .append(owner)
+        .toHashCode();
+  }
+
+  /**
+   * Compares its two arguments for order.  Returns a negative integer, zero, or
+   * a positive integer as the first argument is less than, equal to, or greater
+   * than the second.<p>
+   *
+   * @param o1 the first object to be compared.
+   * @param o2 the second object to be compared.
+   * @return a negative integer, zero, or a positive integer as the first
+   * argument is less than, equal to, or greater than the second.
+   * @throws NullPointerException if an argument is null and this comparator
+   *                              does not permit null arguments
+   * @throws ClassCastException   if the arguments' types prevent them from
+   *                              being compared by this comparator.
+   */
+  @Override
+  public int compare(ContainerInfo o1, ContainerInfo o2) {
+    return Long.compare(o1.getLastUsed(), o2.getLastUsed());
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less than,
+   * equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   * @return a negative integer, zero, or a positive integer as this object is
+   * less than, equal to, or greater than the specified object.
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException   if the specified object's type prevents it
+   *                              from being compared to this object.
+   */
+  @Override
+  public int compareTo(ContainerInfo o) {
+    return this.compare(this, o);
+  }
+
+  /**
+   * Builder class for ContainerInfo.
+   */
+  public static class Builder {
+    private HddsProtos.LifeCycleState state;
+    private Pipeline pipeline;
+    private long allocated;
+    private long used;
+    private long keys;
+    private long stateEnterTime;
+    private String owner;
+    private String containerName;
+    private long containerID;
+
+    public Builder setContainerID(long id) {
+      Preconditions.checkState(id >= 0);
+      this.containerID = id;
+      return this;
+    }
+
+    public Builder setState(HddsProtos.LifeCycleState lifeCycleState) {
+      this.state = lifeCycleState;
+      return this;
+    }
+
+    public Builder setPipeline(Pipeline containerPipeline) {
+      this.pipeline = containerPipeline;
+      return this;
+    }
+
+    public Builder setAllocatedBytes(long bytesAllocated) {
+      this.allocated = bytesAllocated;
+      return this;
+    }
+
+    public Builder setUsedBytes(long bytesUsed) {
+      this.used = bytesUsed;
+      return this;
+    }
+
+    public Builder setNumberOfKeys(long keyCount) {
+      this.keys = keyCount;
+      return this;
+    }
+
+    public Builder setStateEnterTime(long time) {
+      this.stateEnterTime = time;
+      return this;
+    }
+
+    public Builder setOwner(String containerOwner) {
+      this.owner = containerOwner;
+      return this;
+    }
+
+    public Builder setContainerName(String container) {
+      this.containerName = container;
+      return this;
+    }
+
+    public ContainerInfo build() {
+      return new
+          ContainerInfo(containerID, containerName, state, pipeline,
+          allocated, used, keys, stateEnterTime, owner);
+    }
+  }
+}
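
Builder plus protobuf round trip, mirroring what SCM does when persisting or shipping container info. Assumptions: OPEN is taken to be a LifeCycleState constant, the Pipeline comes from the caller, and the name/owner values and demo class name are placeholders.

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.util.Time;

public final class ContainerInfoDemo {
  static ContainerInfo newOpenContainer(Pipeline pipeline) {
    ContainerInfo info = new ContainerInfo.Builder()
        .setContainerID(1L)
        .setContainerName("demo-container")
        .setState(HddsProtos.LifeCycleState.OPEN)  // assumed enum constant
        .setPipeline(pipeline)
        .setAllocatedBytes(0)
        .setUsedBytes(0)
        .setNumberOfKeys(0)
        .setStateEnterTime(Time.monotonicNow())
        .setOwner("ozone")
        .build();
    // Round-trip through the protobuf form.
    HddsProtos.SCMContainerInfo proto = info.getProtobuf();
    return ContainerInfo.fromProtobuf(proto);
  }

  private ContainerInfoDemo() { }
}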
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
new file mode 100644
index 0000000..fd97eae
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+import static org.apache.hadoop.hdds.protocol.proto
+    .ScmBlockLocationProtocolProtos.DeleteScmBlockResult;
+
+/**
+ * Class wraps storage container manager block deletion results.
+ */
+public class DeleteBlockResult {
+  private String key;
+  private DeleteScmBlockResult.Result result;
+
+  public DeleteBlockResult(final String key,
+      final DeleteScmBlockResult.Result result) {
+    this.key = key;
+    this.result = result;
+  }
+
+  /**
+   * Get key deleted.
+   * @return key name.
+   */
+  public String getKey() {
+    return key;
+  }
+
+  /**
+   * Get key deletion result.
+   * @return key deletion result.
+   */
+  public DeleteScmBlockResult.Result getResult() {
+    return result;
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
new file mode 100644
index 0000000..32d0a2d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A pipeline represents the group of machines over which a container lives.
+ */
+public class Pipeline {
+  static final String PIPELINE_INFO = "PIPELINE_INFO_FILTER";
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"data"};
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(PIPELINE_INFO, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    WRITER = mapper.writer(filters);
+  }
+
+  private String containerName;
+  private PipelineChannel pipelineChannel;
+  /**
+   * Private data attached to this pipeline. It is not serialized via
+   * protobuf; it simply lets callers keep some private data with the object.
+   */
+  @JsonIgnore
+  private byte[] data;
+  /**
+   * Constructs a new pipeline data structure.
+   *
+   * @param containerName - Container
+   * @param pipelineChannel - transport information for this container
+   */
+  public Pipeline(String containerName, PipelineChannel pipelineChannel) {
+    this.containerName = containerName;
+    this.pipelineChannel = pipelineChannel;
+    data = null;
+  }
+
+  /**
+   * Gets pipeline object from protobuf.
+   *
+   * @param pipeline - ProtoBuf definition for the pipeline.
+   * @return Pipeline Object
+   */
+  public static Pipeline getFromProtoBuf(HddsProtos.Pipeline pipeline) {
+    Preconditions.checkNotNull(pipeline);
+    PipelineChannel pipelineChannel =
+        PipelineChannel.getFromProtoBuf(pipeline.getPipelineChannel());
+    return new Pipeline(pipeline.getContainerName(), pipelineChannel);
+  }
+
+  public HddsProtos.ReplicationFactor getFactor() {
+    return pipelineChannel.getFactor();
+  }
+
+  /**
+   * Returns the first machine in the set of datanodes.
+   *
+   * @return First Machine.
+   */
+  @JsonIgnore
+  public DatanodeDetails getLeader() {
+    return pipelineChannel.getDatanodes().get(pipelineChannel.getLeaderID());
+  }
+
+  /**
+   * Returns the leader host.
+   *
+   * @return Hostname of the leader machine.
+   */
+  public String getLeaderHost() {
+    return pipelineChannel.getDatanodes()
+        .get(pipelineChannel.getLeaderID()).getHostName();
+  }
+
+  /**
+   * Returns all machines that make up this pipeline.
+   *
+   * @return List of Machines.
+   */
+  @JsonIgnore
+  public List<DatanodeDetails> getMachines() {
+    return new ArrayList<>(pipelineChannel.getDatanodes().values());
+  }
+
+  /**
+   * Returns the hostnames of all machines that make up this pipeline.
+   *
+   * @return List of hostnames.
+   */
+  public List<String> getDatanodeHosts() {
+    List<String> dataHosts = new ArrayList<>();
+    for (DatanodeDetails id : pipelineChannel.getDatanodes().values()) {
+      dataHosts.add(id.getHostName());
+    }
+    return dataHosts;
+  }
+
+  /**
+   * Return a Protobuf Pipeline message from pipeline.
+   *
+   * @return Protobuf message
+   */
+  @JsonIgnore
+  public HddsProtos.Pipeline getProtobufMessage() {
+    HddsProtos.Pipeline.Builder builder =
+        HddsProtos.Pipeline.newBuilder();
+    builder.setContainerName(this.containerName);
+    builder.setPipelineChannel(this.pipelineChannel.getProtobufMessage());
+    return builder.build();
+  }
+
+  /**
+   * Returns containerName if available.
+   *
+   * @return String.
+   */
+  public String getContainerName() {
+    return containerName;
+  }
+
+  /**
+   * Returns private data that is set on this pipeline.
+   *
+   * @return blob, the user can interpret it any way they like.
+   */
+  public byte[] getData() {
+    if (this.data != null) {
+      return Arrays.copyOf(this.data, this.data.length);
+    } else {
+      return null;
+    }
+  }
+
+  @VisibleForTesting
+  public PipelineChannel getPipelineChannel() {
+    return pipelineChannel;
+  }
+
+  /**
+   * Set private data on pipeline.
+   *
+   * @param data -- private data.
+   */
+  public void setData(byte[] data) {
+    if (data != null) {
+      this.data = Arrays.copyOf(data, data.length);
+    }
+  }
+
+  /**
+   * Gets the State of the pipeline.
+   *
+   * @return - LifeCycleStates.
+   */
+  public HddsProtos.LifeCycleState getLifeCycleState() {
+    return pipelineChannel.getLifeCycleState();
+  }
+
+  /**
+   * Gets the pipeline Name.
+   *
+   * @return - Name of the pipeline
+   */
+  public String getPipelineName() {
+    return pipelineChannel.getName();
+  }
+
+  /**
+   * Returns the type.
+   *
+   * @return type - Standalone, Ratis, Chained.
+   */
+  public HddsProtos.ReplicationType getType() {
+    return pipelineChannel.getType();
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder b = new StringBuilder(getClass().getSimpleName())
+        .append("[");
+    pipelineChannel.getDatanodes().keySet().stream()
+        .forEach(id -> b.
+            append(id.endsWith(pipelineChannel.getLeaderID()) ? "*" + id : id));
+    b.append("] container:").append(containerName);
+    b.append(" name:").append(getPipelineName());
+    if (getType() != null) {
+      b.append(" type:").append(getType().toString());
+    }
+    if (getFactor() != null) {
+      b.append(" factor:").append(getFactor().toString());
+    }
+    if (getLifeCycleState() != null) {
+      b.append(" State:").append(getLifeCycleState().toString());
+    }
+    return b.toString();
+  }
+
+  /**
+   * Returns a JSON string of this object.
+   *
+   * @return String - json string
+   * @throws IOException
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  @JsonFilter(PIPELINE_INFO)
+  class MixIn {
+  }
+}
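
Constructing a Pipeline requires a PipelineChannel; the sketch below builds a single-node channel and serializes the pipeline to JSON. The enum constants (OPEN, STAND_ALONE, ONE), the container and pipeline names, and the demo class name are assumptions made for illustration only.

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;

import java.io.IOException;

public final class PipelineDemo {
  // 'dn' is a real datanode supplied by the caller.
  static String describe(DatanodeDetails dn) throws IOException {
    PipelineChannel channel = new PipelineChannel(
        dn.getUuid().toString(),                 // leader is referenced by UUID
        HddsProtos.LifeCycleState.OPEN,          // assumed enum constant
        HddsProtos.ReplicationType.STAND_ALONE,  // assumed enum constant
        HddsProtos.ReplicationFactor.ONE,        // assumed enum constant
        "pipeline-0001");
    channel.addMember(dn);
    Pipeline pipeline = new Pipeline("demo-container", channel);
    return pipeline.toJsonString();  // the 'data' field is filtered out
  }

  private PipelineDemo() { }
}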
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java
new file mode 100644
index 0000000..ebd52e9
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * PipelineChannel information for a {@link Pipeline}.
+ */
+public class PipelineChannel {
+  @JsonIgnore
+  private String leaderID;
+  @JsonIgnore
+  private Map<String, DatanodeDetails> datanodes;
+  private LifeCycleState lifeCycleState;
+  private ReplicationType type;
+  private ReplicationFactor factor;
+  private String name;
+
+  public PipelineChannel(String leaderID, LifeCycleState lifeCycleState,
+      ReplicationType replicationType, ReplicationFactor replicationFactor,
+      String name) {
+    this.leaderID = leaderID;
+    this.lifeCycleState = lifeCycleState;
+    this.type = replicationType;
+    this.factor = replicationFactor;
+    this.name = name;
+    datanodes = new TreeMap<>();
+  }
+
+  public String getLeaderID() {
+    return leaderID;
+  }
+
+  public Map<String, DatanodeDetails> getDatanodes() {
+    return datanodes;
+  }
+
+  public LifeCycleState getLifeCycleState() {
+    return lifeCycleState;
+  }
+
+  public ReplicationType getType() {
+    return type;
+  }
+
+  public ReplicationFactor getFactor() {
+    return factor;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void addMember(DatanodeDetails datanodeDetails) {
+    datanodes.put(datanodeDetails.getUuid().toString(),
+        datanodeDetails);
+  }
+
+  @JsonIgnore
+  public HddsProtos.PipelineChannel getProtobufMessage() {
+    HddsProtos.PipelineChannel.Builder builder =
+        HddsProtos.PipelineChannel.newBuilder();
+    for (DatanodeDetails datanode : datanodes.values()) {
+      builder.addMembers(datanode.getProtoBufMessage());
+    }
+    builder.setLeaderID(leaderID);
+
+    if (this.getLifeCycleState() != null) {
+      builder.setState(this.getLifeCycleState());
+    }
+    if (this.getType() != null) {
+      builder.setType(this.getType());
+    }
+
+    if (this.getFactor() != null) {
+      builder.setFactor(this.getFactor());
+    }
+    return builder.build();
+  }
+
+  public static PipelineChannel getFromProtoBuf(
+      HddsProtos.PipelineChannel transportProtos) {
+    Preconditions.checkNotNull(transportProtos);
+    PipelineChannel pipelineChannel =
+        new PipelineChannel(transportProtos.getLeaderID(),
+            transportProtos.getState(),
+            transportProtos.getType(),
+            transportProtos.getFactor(),
+            transportProtos.getName());
+
+    for (HddsProtos.DatanodeDetailsProto dataID :
+        transportProtos.getMembersList()) {
+      pipelineChannel.addMember(DatanodeDetails.getFromProtoBuf(dataID));
+    }
+    return pipelineChannel;
+  }
+}
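
A sketch of populating a channel and round-tripping it through its protobuf form; members are keyed by their UUID string in a TreeMap, so getDatanodes() iterates them in UUID order. The helper class and method names here are invented for illustration.

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;

public final class PipelineChannelDemo {
  static PipelineChannel addAllAndCopy(PipelineChannel channel,
      Iterable<DatanodeDetails> members) {
    for (DatanodeDetails dn : members) {
      channel.addMember(dn);  // stored under dn.getUuid().toString()
    }
    // Convert to protobuf and back, as done when crossing RPC boundaries.
    HddsProtos.PipelineChannel proto = channel.getProtobufMessage();
    return PipelineChannel.getFromProtoBuf(proto);
  }

  private PipelineChannelDemo() { }
}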
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
new file mode 100644
index 0000000..35d8444
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+
+import java.io.IOException;
+
+/**
+ * Exceptions thrown from the Storage Container.
+ */
+public class StorageContainerException extends IOException {
+  private ContainerProtos.Result result;
+
+  /**
+   * Constructs a {@code StorageContainerException} with {@code null}
+   * as its error detail message.
+   */
+  public StorageContainerException(ContainerProtos.Result result) {
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code StorageContainerException} with the specified
+   * detail message.
+   *
+   * @param message The detail message (which is saved for later retrieval by
+   * the {@link #getMessage()} method)
+   * @param result - The result code
+   */
+  public StorageContainerException(String message,
+      ContainerProtos.Result result) {
+    super(message);
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code StorageContainerException} with the specified detail
+   * message and cause.
+   * <p>
+   * <p> Note that the detail message associated with {@code cause} is
+   * <i>not</i> automatically incorporated into this exception's detail
+   * message.
+   *
+   * @param message The detail message (which is saved for later retrieval by
+   * the {@link #getMessage()} method)
+   *
+   * @param cause The cause (which is saved for later retrieval by the {@link
+   * #getCause()} method).  (A null value is permitted, and indicates that the
+   * cause is nonexistent or unknown.)
+   *
+   * @param result - The result code
+   * @since 1.6
+   */
+  public StorageContainerException(String message, Throwable cause,
+      ContainerProtos.Result result) {
+    super(message, cause);
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code StorageContainerException} with the specified cause
+   * and a detail message of {@code (cause==null ? null : cause.toString())}
+   * (which typically contains the class and detail message of {@code cause}).
+   * This constructor is useful for IO exceptions that are little more
+   * than wrappers for other throwables.
+   *
+   * @param cause The cause (which is saved for later retrieval by the {@link
+   * #getCause()} method).  (A null value is permitted, and indicates that the
+   * cause is nonexistent or unknown.)
+   * @param result - The result code
+   * @since 1.6
+   */
+  public StorageContainerException(Throwable cause, ContainerProtos.Result
+      result) {
+    super(cause);
+    this.result = result;
+  }
+
+  /**
+   * Returns Result.
+   *
+   * @return Result.
+   */
+  public ContainerProtos.Result getResult() {
+    return result;
+  }
+
+
+}
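A minimal sketch of how callers are expected to consume the result code carried by StorageContainerException; it is illustrative only, and assumes a ContainerProtos.Result constant such as CONTAINER_NOT_FOUND is available.

import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;

public final class StorageContainerExceptionExample {
  // Throws with both a human-readable message and a structured result code.
  static void fail() throws StorageContainerException {
    throw new StorageContainerException("container missing",
        ContainerProtos.Result.CONTAINER_NOT_FOUND); // assumed constant
  }

  public static void main(String[] args) {
    try {
      fail();
    } catch (StorageContainerException e) {
      // The result code travels alongside the usual exception message.
      System.out.println(e.getResult() + ": " + e.getMessage());
    }
  }
}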
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
new file mode 100644
index 0000000..ffe0d3d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container.common.helpers;
+/**
+ Contains protocol buffer helper classes and utilities used by the
+ container implementation.
+ **/
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
new file mode 100644
index 0000000..d13dcb1
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..3c544db
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+/**
+ * This package contains classes for the client of the storage container
+ * protocol.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
new file mode 100644
index 0000000..14ee3d2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+
+import java.util.Set;
+
+/**
+ * Holds the nodes that currently host the container for an object key hash.
+ */
+@InterfaceAudience.Private
+public final class LocatedContainer {
+  private final String key;
+  private final String matchedKeyPrefix;
+  private final String containerName;
+  private final Set<DatanodeInfo> locations;
+  private final DatanodeInfo leader;
+
+  /**
+   * Creates a LocatedContainer.
+   *
+   * @param key object key
+   * @param matchedKeyPrefix prefix of key that was used to find the location
+   * @param containerName container name
+   * @param locations nodes that currently host the container
+   * @param leader node that currently acts as pipeline leader
+   */
+  public LocatedContainer(String key, String matchedKeyPrefix,
+      String containerName, Set<DatanodeInfo> locations, DatanodeInfo leader) {
+    this.key = key;
+    this.matchedKeyPrefix = matchedKeyPrefix;
+    this.containerName = containerName;
+    this.locations = locations;
+    this.leader = leader;
+  }
+
+  /**
+   * Returns the container name.
+   *
+   * @return container name
+   */
+  public String getContainerName() {
+    return this.containerName;
+  }
+
+  /**
+   * Returns the object key.
+   *
+   * @return object key
+   */
+  public String getKey() {
+    return this.key;
+  }
+
+  /**
+   * Returns the node that currently acts as pipeline leader.
+   *
+   * @return node that currently acts as pipeline leader
+   */
+  public DatanodeInfo getLeader() {
+    return this.leader;
+  }
+
+  /**
+   * Returns the nodes that currently host the container.
+   *
+   * @return Set<DatanodeInfo> nodes that currently host the container
+   */
+  public Set<DatanodeInfo> getLocations() {
+    return this.locations;
+  }
+
+  /**
+   * Returns the prefix of the key that was used to find the location.
+   *
+   * @return prefix of the key that was used to find the location
+   */
+  public String getMatchedKeyPrefix() {
+    return this.matchedKeyPrefix;
+  }
+
+  @Override
+  public boolean equals(Object otherObj) {
+    if (otherObj == null) {
+      return false;
+    }
+    if (!(otherObj instanceof LocatedContainer)) {
+      return false;
+    }
+    LocatedContainer other = (LocatedContainer)otherObj;
+    return this.key == null ? other.key == null : this.key.equals(other.key);
+  }
+
+  @Override
+  public int hashCode() {
+    return key.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName()
+        + "{key=" + key
+        + "; matchedKeyPrefix=" + matchedKeyPrefix
+        + "; containerName=" + containerName
+        + "; locations=" + locations
+        + "; leader=" + leader
+        + "}";
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
new file mode 100644
index 0000000..f100fc7
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes
+ * to read/write a block.
+ */
+public interface ScmBlockLocationProtocol {
+
+  /**
+   * Find the set of nodes to read/write a block, as
+   * identified by the block key.  This method supports batch lookup by
+   * passing multiple keys.
+   *
+   * @param keys batch of block keys to find
+   * @return allocated blocks for each block key
+   * @throws IOException if there is any failure
+   */
+  Set<AllocatedBlock> getBlockLocations(Set<String> keys) throws IOException;
+
+  /**
+   * Asks SCM where a block should be allocated. SCM responds with the
+   * set of datanodes that should be used for creating this block.
+   * @param size - size of the block.
+   * @return allocated block accessing info (key, pipeline).
+   * @throws IOException
+   */
+  AllocatedBlock allocateBlock(long size, ReplicationType type,
+      ReplicationFactor factor, String owner) throws IOException;
+
+  /**
+   * Delete blocks for a set of object keys.
+   *
+   * @param keyBlocksInfoList list of object keys with their associated blocks.
+   * @return list of block deletion results.
+   * @throws IOException if there is any failure.
+   */
+  List<DeleteBlockGroupResult>
+      deleteKeyBlocks(List<BlockGroup> keyBlocksInfoList) throws IOException;
+
+  /**
+   * Gets the cluster ID and SCM ID from SCM.
+   */
+  ScmInfo getScmInfo() throws IOException;
+}
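A hedged sketch of driving ScmBlockLocationProtocol from a caller's perspective, assuming an implementation (such as the protobuf translator added later in this change) is already available; the block size, owner string, and replication constants are illustrative.

import java.io.IOException;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;

final class ScmBlockClientExample {
  // Asks SCM for one 64 MB block; replication settings here are assumptions.
  static AllocatedBlock allocateOneBlock(ScmBlockLocationProtocol scm)
      throws IOException {
    return scm.allocateBlock(64L * 1024 * 1024,
        ReplicationType.RATIS, ReplicationFactor.THREE, "example-owner");
  }

  // Fetches the cluster id / SCM id pair exposed by getScmInfo().
  static ScmInfo clusterIdentity(ScmBlockLocationProtocol scm)
      throws IOException {
    return scm.getScmInfo();
  }
}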
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
new file mode 100644
index 0000000..6cbdee4
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Holds the nodes that currently host the block for a block key.
+ */
+@InterfaceAudience.Private
+public final class ScmLocatedBlock {
+  private final String key;
+  private final List<DatanodeInfo> locations;
+  private final DatanodeInfo leader;
+
+  /**
+   * Creates a ScmLocatedBlock.
+   *
+   * @param key object key
+   * @param locations nodes that currently host the block
+   * @param leader node that currently acts as pipeline leader
+   */
+  public ScmLocatedBlock(final String key, final List<DatanodeInfo> locations,
+      final DatanodeInfo leader) {
+    this.key = key;
+    this.locations = locations;
+    this.leader = leader;
+  }
+
+  /**
+   * Returns the object key.
+   *
+   * @return object key
+   */
+  public String getKey() {
+    return this.key;
+  }
+
+  /**
+   * Returns the node that currently acts as pipeline leader.
+   *
+   * @return node that currently acts as pipeline leader
+   */
+  public DatanodeInfo getLeader() {
+    return this.leader;
+  }
+
+  /**
+   * Returns the nodes that currently host the block.
+   *
+   * @return List<DatanodeInfo> nodes that currently host the block
+   */
+  public List<DatanodeInfo> getLocations() {
+    return this.locations;
+  }
+
+  @Override
+  public boolean equals(Object otherObj) {
+    if (otherObj == null) {
+      return false;
+    }
+    if (!(otherObj instanceof ScmLocatedBlock)) {
+      return false;
+    }
+    ScmLocatedBlock other = (ScmLocatedBlock)otherObj;
+    return this.key == null ? other.key == null : this.key.equals(other.key);
+  }
+
+  @Override
+  public int hashCode() {
+    return key.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName() + "{key=" + key + "; locations="
+        + locations.stream().map(loc -> loc.toString()).collect(Collectors
+            .joining(",")) + "; leader=" + leader + "}";
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
new file mode 100644
index 0000000..a60fbb2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * ContainerLocationProtocol is used by an HDFS node to find the set of nodes
+ * that currently host a container.
+ */
+public interface StorageContainerLocationProtocol {
+  /**
+   * Asks SCM where a container should be allocated. SCM responds with the
+   * set of datanodes that should be used for creating this container.
+   *
+   */
+  Pipeline allocateContainer(HddsProtos.ReplicationType replicationType,
+      HddsProtos.ReplicationFactor factor, String containerName, String owner)
+      throws IOException;
+
+  /**
+   * Asks SCM for the location of the container. SCM responds with the group
+   * of nodes where this container and its replicas are located.
+   *
+   * @param containerName - Name of the container.
+   * @return Pipeline - the pipeline in which the container is located.
+   * @throws IOException
+   */
+  Pipeline getContainer(String containerName) throws IOException;
+
+  /**
+   * Asks SCM for a list of containers. The search starts after the given
+   * start name (exclusive), results are optionally filtered by a prefix,
+   * and at most count containers are returned.
+   *
+   * @param startName start name; if null, the search starts at the head.
+   * @param prefixName prefix name; if null, the filter is disabled.
+   * @param count maximum number of containers to return; if count < 0 the
+   *              size is unlimited (callers usually pass a large bound
+   *              instead of an unlimited query when the db is very big).
+   *
+   * @return a list of containers.
+   * @throws IOException
+   */
+  List<ContainerInfo> listContainer(String startName, String prefixName,
+      int count) throws IOException;
+
+  /**
+   * Deletes a container in SCM.
+   *
+   * @param containerName
+   * @throws IOException
+   *   if failed to delete the container mapping from db store
+   *   or container doesn't exist.
+   */
+  void deleteContainer(String containerName) throws IOException;
+
+  /**
+   *  Queries a list of Node Statuses.
+   * @param nodeStatuses
+   * @return List of Datanodes.
+   */
+  HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState> nodeStatuses,
+      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
+
+  /**
+   * Notification from the client when it begins or finishes creating objects
+   * such as pipelines or containers on datanodes.
+   * The container will be in the Operational state after that.
+   * @param type object type
+   * @param name object name
+   * @param op operation type (e.g., create, close, delete)
+   * @param stage creation stage
+   */
+  void notifyObjectStageChange(
+      ObjectStageChangeRequestProto.Type type, String name,
+      ObjectStageChangeRequestProto.Op op,
+      ObjectStageChangeRequestProto.Stage stage) throws IOException;
+
+  /**
+   * Creates a replication pipeline of a specified type.
+   * @param type - replication type
+   * @param factor - factor 1 or 3
+   * @param nodePool - optional machine list to build a pipeline.
+   * @throws IOException
+   */
+  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
+      throws IOException;
+
+  /**
+   * Returns information about SCM.
+   *
+   * @return {@link ScmInfo}
+   * @throws IOException
+   */
+  ScmInfo getScmInfo() throws IOException;
+}
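A hedged usage sketch for StorageContainerLocationProtocol, assuming an implementation is at hand; the container name, owner, and replication constants below are illustrative only.

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;

final class ContainerLocationClientExample {
  // Allocates a single-replica container, then lists up to ten containers
  // whose names start with "demo"; every name here is an example value.
  static Pipeline createAndList(StorageContainerLocationProtocol scm)
      throws IOException {
    Pipeline pipeline = scm.allocateContainer(
        HddsProtos.ReplicationType.STAND_ALONE,
        HddsProtos.ReplicationFactor.ONE,
        "demo-container", "demo-owner");
    List<ContainerInfo> firstPage = scm.listContainer(null, "demo", 10);
    System.out.println("containers listed: " + firstPage.size());
    return pipeline;
  }
}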
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
new file mode 100644
index 0000000..b56a749
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.protocol;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
new file mode 100644
index 0000000..0012f3e
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.protocolPB;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .KeyBlocks;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .ScmLocatedBlockProto;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtocolTranslator;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This class is the client-side translator to translate the requests made on
+ * the {@link ScmBlockLocationProtocol} interface to the RPC server
+ * implementing {@link ScmBlockLocationProtocolPB}.
+ */
+@InterfaceAudience.Private
+public final class ScmBlockLocationProtocolClientSideTranslatorPB
+    implements ScmBlockLocationProtocol, ProtocolTranslator, Closeable {
+
+  /**
+   * RpcController is not used and hence is set to null.
+   */
+  private static final RpcController NULL_RPC_CONTROLLER = null;
+
+  private final ScmBlockLocationProtocolPB rpcProxy;
+
+  /**
+   * Creates a new ScmBlockLocationProtocolClientSideTranslatorPB.
+   *
+   * @param rpcProxy {@link ScmBlockLocationProtocolPB} RPC proxy
+   */
+  public ScmBlockLocationProtocolClientSideTranslatorPB(
+      ScmBlockLocationProtocolPB rpcProxy) {
+    this.rpcProxy = rpcProxy;
+  }
+
+  /**
+   * Find the set of nodes to read/write a block, as
+   * identified by the block key.  This method supports batch lookup by
+   * passing multiple keys.
+   *
+   * @param keys batch of block keys to find
+   * @return allocated blocks for each block key
+   * @throws IOException if there is any failure
+   */
+  @Override
+  public Set<AllocatedBlock> getBlockLocations(Set<String> keys)
+      throws IOException {
+    GetScmBlockLocationsRequestProto.Builder req =
+        GetScmBlockLocationsRequestProto.newBuilder();
+    for (String key : keys) {
+      req.addKeys(key);
+    }
+    final GetScmBlockLocationsResponseProto resp;
+    try {
+      resp = rpcProxy.getScmBlockLocations(NULL_RPC_CONTROLLER,
+          req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    Set<AllocatedBlock> locatedBlocks =
+        Sets.newLinkedHashSetWithExpectedSize(resp.getLocatedBlocksCount());
+    for (ScmLocatedBlockProto locatedBlock : resp.getLocatedBlocksList()) {
+      locatedBlocks.add(new AllocatedBlock.Builder()
+          .setKey(locatedBlock.getKey())
+          .setPipeline(Pipeline.getFromProtoBuf(locatedBlock.getPipeline()))
+          .build());
+    }
+    return locatedBlocks;
+  }
+
+  /**
+   * Asks SCM where a block should be allocated. SCM responds with the
+   * set of datanodes that should be used for creating this block.
+   * @param size - size of the block.
+   * @return allocated block accessing info (key, pipeline).
+   * @throws IOException
+   */
+  @Override
+  public AllocatedBlock allocateBlock(long size,
+      HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor,
+      String owner) throws IOException {
+    Preconditions.checkArgument(size > 0, "block size must be greater than 0");
+
+    AllocateScmBlockRequestProto request =
+        AllocateScmBlockRequestProto.newBuilder().setSize(size).setType(type)
+            .setFactor(factor).setOwner(owner).build();
+    final AllocateScmBlockResponseProto response;
+    try {
+      response = rpcProxy.allocateScmBlock(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (response.getErrorCode() !=
+        AllocateScmBlockResponseProto.Error.success) {
+      throw new IOException(response.hasErrorMessage() ?
+          response.getErrorMessage() : "Allocate block failed.");
+    }
+    AllocatedBlock.Builder builder = new AllocatedBlock.Builder()
+        .setKey(response.getKey())
+        .setPipeline(Pipeline.getFromProtoBuf(response.getPipeline()))
+        .setShouldCreateContainer(response.getCreateContainer());
+    return builder.build();
+  }
+
+  /**
+   * Delete the set of keys specified.
+   *
+   * @param keyBlocksInfoList batch of block keys to delete.
+   * @return list of block deletion results.
+   * @throws IOException if there is any failure.
+   *
+   */
+  @Override
+  public List<DeleteBlockGroupResult> deleteKeyBlocks(
+      List<BlockGroup> keyBlocksInfoList) throws IOException {
+    List<KeyBlocks> keyBlocksProto = keyBlocksInfoList.stream()
+        .map(BlockGroup::getProto).collect(Collectors.toList());
+    DeleteScmKeyBlocksRequestProto request = DeleteScmKeyBlocksRequestProto
+        .newBuilder().addAllKeyBlocks(keyBlocksProto).build();
+
+    final DeleteScmKeyBlocksResponseProto resp;
+    try {
+      resp = rpcProxy.deleteScmKeyBlocks(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    List<DeleteBlockGroupResult> results =
+        new ArrayList<>(resp.getResultsCount());
+    results.addAll(resp.getResultsList().stream().map(
+        result -> new DeleteBlockGroupResult(result.getObjectKey(),
+            DeleteBlockGroupResult
+                .convertBlockResultProto(result.getBlockResultsList())))
+        .collect(Collectors.toList()));
+    return results;
+  }
+
+  /**
+   * Gets the cluster ID and SCM ID from SCM.
+   * @return ScmInfo
+   * @throws IOException
+   */
+  @Override
+  public ScmInfo getScmInfo() throws IOException {
+    HddsProtos.GetScmInfoRequestProto request =
+        HddsProtos.GetScmInfoRequestProto.getDefaultInstance();
+    HddsProtos.GetScmInfoRespsonseProto resp;
+    try {
+      resp = rpcProxy.getScmInfo(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    ScmInfo.Builder builder = new ScmInfo.Builder()
+        .setClusterId(resp.getClusterId())
+        .setScmId(resp.getScmId());
+    return builder.build();
+  }
+
+  @Override
+  public Object getUnderlyingProxyObject() {
+    return rpcProxy;
+  }
+
+  @Override
+  public void close() {
+    RPC.stopProxy(rpcProxy);
+  }
+}
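A hedged sketch of wrapping an already-created ScmBlockLocationProtocolPB proxy (for example one obtained from Hadoop's RPC machinery) in the translator; because the class implements Closeable, try-with-resources releases the proxy through close().

import java.io.IOException;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;

final class ScmBlockTranslatorExample {
  // The proxy is assumed to be created elsewhere; the translator turns
  // protobuf ServiceExceptions into plain IOExceptions for its callers.
  static ScmInfo fetchScmInfo(ScmBlockLocationProtocolPB proxy)
      throws IOException {
    try (ScmBlockLocationProtocolClientSideTranslatorPB client =
             new ScmBlockLocationProtocolClientSideTranslatorPB(proxy)) {
      return client.getScmInfo();
    }
  }
}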
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
new file mode 100644
index 0000000..837c95b
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.protocolPB;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .ScmBlockLocationProtocolService;
+import org.apache.hadoop.ipc.ProtocolInfo;
+
+/**
+ * Protocol used from an HDFS node to StorageContainerManager.  This extends the
+ * Protocol Buffers service interface to add Hadoop-specific annotations.
+ */
+@ProtocolInfo(protocolName =
+    "org.apache.hadoop.ozone.protocol.ScmBlockLocationProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.Private
+public interface ScmBlockLocationProtocolPB
+    extends ScmBlockLocationProtocolService.BlockingInterface {
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
new file mode 100644
index 0000000..3638f63
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
@@ -0,0 +1,316 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.protocolPB;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.NodeQueryRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.NodeQueryResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerResponseProto;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtocolTranslator;
+import org.apache.hadoop.ipc.RPC;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * This class is the client-side translator to translate the requests made on
+ * the {@link StorageContainerLocationProtocol} interface to the RPC server
+ * implementing {@link StorageContainerLocationProtocolPB}.
+ */
+@InterfaceAudience.Private
+public final class StorageContainerLocationProtocolClientSideTranslatorPB
+    implements StorageContainerLocationProtocol, ProtocolTranslator, Closeable {
+
+  /**
+   * RpcController is not used and hence is set to null.
+   */
+  private static final RpcController NULL_RPC_CONTROLLER = null;
+
+  private final StorageContainerLocationProtocolPB rpcProxy;
+
+  /**
+   * Creates a new StorageContainerLocationProtocolClientSideTranslatorPB.
+   *
+   * @param rpcProxy {@link StorageContainerLocationProtocolPB} RPC proxy
+   */
+  public StorageContainerLocationProtocolClientSideTranslatorPB(
+      StorageContainerLocationProtocolPB rpcProxy) {
+    this.rpcProxy = rpcProxy;
+  }
+
+  /**
+   * Asks SCM where a container should be allocated. SCM responds with the set
+   * of datanodes that should be used for creating this container. Ozone/SCM
+   * only supports a replication factor of either 1 or 3.
+   * @param type - Replication Type
+   * @param factor - Replication Count
+   * @param containerName - Name
+   * @return the pipeline of datanodes allocated for the container
+   * @throws IOException
+   */
+  @Override
+  public Pipeline allocateContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, String
+      containerName, String owner) throws IOException {
+
+    Preconditions.checkNotNull(containerName, "Container Name cannot be Null");
+    Preconditions.checkState(!containerName.isEmpty(), "Container name cannot" +
+        " be empty");
+    ContainerRequestProto request = ContainerRequestProto.newBuilder()
+        .setContainerName(containerName)
+        .setReplicationFactor(factor)
+        .setReplicationType(type)
+        .setOwner(owner)
+        .build();
+
+    final ContainerResponseProto response;
+    try {
+      response = rpcProxy.allocateContainer(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (response.getErrorCode() != ContainerResponseProto.Error.success) {
+      throw new IOException(response.hasErrorMessage() ?
+          response.getErrorMessage() : "Allocate container failed.");
+    }
+    return Pipeline.getFromProtoBuf(response.getPipeline());
+  }
+
+  public Pipeline getContainer(String containerName) throws IOException {
+    Preconditions.checkNotNull(containerName,
+        "Container Name cannot be Null");
+    Preconditions.checkState(!containerName.isEmpty(),
+        "Container name cannot be empty");
+    GetContainerRequestProto request = GetContainerRequestProto
+        .newBuilder()
+        .setContainerName(containerName)
+        .build();
+    try {
+      GetContainerResponseProto response =
+          rpcProxy.getContainer(NULL_RPC_CONTROLLER, request);
+      return Pipeline.getFromProtoBuf(response.getPipeline());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<ContainerInfo> listContainer(String startName, String prefixName,
+      int count) throws IOException {
+    SCMListContainerRequestProto.Builder builder = SCMListContainerRequestProto
+        .newBuilder();
+    if (prefixName != null) {
+      builder.setPrefixName(prefixName);
+    }
+    if (startName != null) {
+      builder.setStartName(startName);
+    }
+    builder.setCount(count);
+    SCMListContainerRequestProto request = builder.build();
+
+    try {
+      SCMListContainerResponseProto response =
+          rpcProxy.listContainer(NULL_RPC_CONTROLLER, request);
+      List<ContainerInfo> containerList = new ArrayList<>();
+      for (HddsProtos.SCMContainerInfo containerInfoProto : response
+          .getContainersList()) {
+        containerList.add(ContainerInfo.fromProtobuf(containerInfoProto));
+      }
+      return containerList;
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  /**
+   * Ask SCM to delete a container by name. SCM will remove
+   * the container mapping in its database.
+   *
+   * @param containerName
+   * @throws IOException
+   */
+  @Override
+  public void deleteContainer(String containerName)
+      throws IOException {
+    Preconditions.checkState(!Strings.isNullOrEmpty(containerName),
+        "Container name cannot be null or empty");
+    SCMDeleteContainerRequestProto request = SCMDeleteContainerRequestProto
+        .newBuilder()
+        .setContainerName(containerName)
+        .build();
+    try {
+      rpcProxy.deleteContainer(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  /**
+   * Queries a list of Node Statuses.
+   *
+   * @param nodeStatuses
+   * @return List of Datanodes.
+   */
+  @Override
+  public HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState>
+      nodeStatuses, HddsProtos.QueryScope queryScope, String poolName)
+      throws IOException {
+    // TODO: Only cluster-wide queries are supported right now, so queryScope
+    // and poolName are ignored.
+    Preconditions.checkNotNull(nodeStatuses);
+    Preconditions.checkState(nodeStatuses.size() > 0);
+    NodeQueryRequestProto request = NodeQueryRequestProto.newBuilder()
+        .addAllQuery(nodeStatuses)
+        .setScope(queryScope).setPoolName(poolName).build();
+    try {
+      NodeQueryResponseProto response =
+          rpcProxy.queryNode(NULL_RPC_CONTROLLER, request);
+      return response.getDatanodes();
+    } catch (ServiceException e) {
+      throw  ProtobufHelper.getRemoteException(e);
+    }
+
+  }
+
+  /**
+   * Notifies SCM of a stage change for an object (such as a pipeline or a
+   * container) being created on datanodes.
+   * @param type object type
+   * @param name object name
+   * @param op operation type (e.g., create, close, delete)
+   * @param stage object creation stage : begin/complete
+   */
+  @Override
+  public void notifyObjectStageChange(
+      ObjectStageChangeRequestProto.Type type, String name,
+      ObjectStageChangeRequestProto.Op op,
+      ObjectStageChangeRequestProto.Stage stage) throws IOException {
+    Preconditions.checkState(!Strings.isNullOrEmpty(name),
+        "Object name cannot be null or empty");
+    ObjectStageChangeRequestProto request =
+        ObjectStageChangeRequestProto.newBuilder()
+            .setType(type)
+            .setName(name)
+            .setOp(op)
+            .setStage(stage)
+            .build();
+    try {
+      rpcProxy.notifyObjectStageChange(NULL_RPC_CONTROLLER, request);
+    } catch(ServiceException e){
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  /**
+   * Creates a replication pipeline of a specified type.
+   *
+   * @param replicationType - replication type
+   * @param factor - factor 1 or 3
+   * @param nodePool - optional machine list to build a pipeline.
+   * @throws IOException
+   */
+  @Override
+  public Pipeline createReplicationPipeline(HddsProtos.ReplicationType
+      replicationType, HddsProtos.ReplicationFactor factor, HddsProtos
+      .NodePool nodePool) throws IOException {
+    PipelineRequestProto request = PipelineRequestProto.newBuilder()
+        .setNodePool(nodePool)
+        .setReplicationFactor(factor)
+        .setReplicationType(replicationType)
+        .build();
+    try {
+      PipelineResponseProto response =
+          rpcProxy.allocatePipeline(NULL_RPC_CONTROLLER, request);
+      if (response.getErrorCode() ==
+          PipelineResponseProto.Error.success) {
+        Preconditions.checkState(response.hasPipeline(), "With success, " +
+            "must come a pipeline");
+        return Pipeline.getFromProtoBuf(response.getPipeline());
+      } else {
+        String errorMessage = String.format("create replication pipeline " +
+                "failed. code : %s Message: %s", response.getErrorCode(),
+            response.hasErrorMessage() ? response.getErrorMessage() : "");
+        throw new IOException(errorMessage);
+      }
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
+  @Override
+  public ScmInfo getScmInfo() throws IOException {
+    HddsProtos.GetScmInfoRequestProto request =
+        HddsProtos.GetScmInfoRequestProto.getDefaultInstance();
+    try {
+      HddsProtos.GetScmInfoRespsonseProto resp = rpcProxy.getScmInfo(
+          NULL_RPC_CONTROLLER, request);
+      ScmInfo.Builder builder = new ScmInfo.Builder()
+          .setClusterId(resp.getClusterId())
+          .setScmId(resp.getScmId());
+      return builder.build();
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+  }
+
+  @Override
+  public Object getUnderlyingProxyObject() {
+    return rpcProxy;
+  }
+
+  @Override
+  public void close() {
+    RPC.stopProxy(rpcProxy);
+  }
+}
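A hedged sketch of calling the container-location translator once it has been constructed; note that both RPC failures and non-success error codes surface as IOException, so one catch path suffices. Names and constants below are illustrative.

import java.io.IOException;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;

final class ContainerLocationTranslatorExample {
  static Pipeline allocateOrFail(
      StorageContainerLocationProtocolClientSideTranslatorPB client)
      throws IOException {
    // The translator validates the response code and throws on failure.
    return client.allocateContainer(HddsProtos.ReplicationType.STAND_ALONE,
        HddsProtos.ReplicationFactor.ONE, "demo-container", "demo-owner");
  }
}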
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
new file mode 100644
index 0000000..f234ad3
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.protocolPB;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos
+    .StorageContainerLocationProtocolService;
+import org.apache.hadoop.ipc.ProtocolInfo;
+
+/**
+ * Protocol used from an HDFS node to StorageContainerManager.  This extends the
+ * Protocol Buffers service interface to add Hadoop-specific annotations.
+ */
+@ProtocolInfo(protocolName =
+    "org.apache.hadoop.ozone.protocol.StorageContainerLocationProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.Private
+public interface StorageContainerLocationProtocolPB
+    extends StorageContainerLocationProtocolService.BlockingInterface {
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java
new file mode 100644
index 0000000..652ae60
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.protocolPB;
+
+/**
+ * This package contains classes for the client of the storage container
+ * protocol.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
new file mode 100644
index 0000000..1559816
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
@@ -0,0 +1,396 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.storage;
+
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .GetKeyResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .GetSmallFileRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .GetSmallFileResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .PutSmallFileRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadChunkRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadChunkResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .WriteChunkRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
+
+import java.io.IOException;
+
+/**
+ * Implementation of all container protocol calls performed by Container
+ * clients.
+ */
+public final class ContainerProtocolCalls  {
+
+  /**
+   * There is no need to instantiate this class.
+   */
+  private ContainerProtocolCalls() {
+  }
+
+  /**
+   * Calls the container protocol to get a container key.
+   *
+   * @param xceiverClient client to perform call
+   * @param containerKeyData key data to identify container
+   * @param traceID container protocol call args
+   * @return container protocol get key response
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static GetKeyResponseProto getKey(XceiverClientSpi xceiverClient,
+      KeyData containerKeyData, String traceID) throws IOException {
+    GetKeyRequestProto.Builder readKeyRequest = GetKeyRequestProto
+        .newBuilder()
+        .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
+        .setKeyData(containerKeyData);
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(Type.GetKey)
+        .setTraceID(traceID)
+        .setDatanodeUuid(id)
+        .setGetKey(readKeyRequest)
+        .build();
+    ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
+    validateContainerResponse(response);
+    return response.getGetKey();
+  }
+
+  /**
+   * Calls the container protocol to put a container key.
+   *
+   * @param xceiverClient client to perform call
+   * @param containerKeyData key data to identify container
+   * @param traceID container protocol call args
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void putKey(XceiverClientSpi xceiverClient,
+      KeyData containerKeyData, String traceID) throws IOException {
+    PutKeyRequestProto.Builder createKeyRequest = PutKeyRequestProto
+        .newBuilder()
+        .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
+        .setKeyData(containerKeyData);
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(Type.PutKey)
+        .setTraceID(traceID)
+        .setDatanodeUuid(id)
+        .setPutKey(createKeyRequest)
+        .build();
+    ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
+    validateContainerResponse(response);
+  }
+
+  /**
+   * Calls the container protocol to read a chunk.
+   *
+   * @param xceiverClient client to perform call
+   * @param chunk information about chunk to read
+   * @param key the key name
+   * @param traceID container protocol call args
+   * @return container protocol read chunk response
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static ReadChunkResponseProto readChunk(XceiverClientSpi xceiverClient,
+      ChunkInfo chunk, String key, String traceID)
+      throws IOException {
+    ReadChunkRequestProto.Builder readChunkRequest = ReadChunkRequestProto
+        .newBuilder()
+        .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
+        .setKeyName(key)
+        .setChunkData(chunk);
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(Type.ReadChunk)
+        .setTraceID(traceID)
+        .setDatanodeUuid(id)
+        .setReadChunk(readChunkRequest)
+        .build();
+    ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
+    validateContainerResponse(response);
+    return response.getReadChunk();
+  }
+
+  /**
+   * Calls the container protocol to write a chunk.
+   *
+   * @param xceiverClient client to perform call
+   * @param chunk information about chunk to write
+   * @param key the key name
+   * @param data the data of the chunk to write
+   * @param traceID trace ID for tracking the container protocol call
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void writeChunk(XceiverClientSpi xceiverClient, ChunkInfo chunk,
+      String key, ByteString data, String traceID)
+      throws IOException {
+    WriteChunkRequestProto.Builder writeChunkRequest = WriteChunkRequestProto
+        .newBuilder()
+        .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
+        .setKeyName(key)
+        .setChunkData(chunk)
+        .setData(data);
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(Type.WriteChunk)
+        .setTraceID(traceID)
+        .setDatanodeUuid(id)
+        .setWriteChunk(writeChunkRequest)
+        .build();
+    ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
+    validateContainerResponse(response);
+  }
+
+  /**
+   * Allows writing a small file using a single RPC. This takes the container
+   * name, key name and data, and sends all of that data to the container
+   * using a single RPC. This API is designed to be used for files which are
+   * smaller than 1 MB.
+   *
+   * @param client - client that communicates with the container.
+   * @param containerName - Name of the container
+   * @param key - Name of the Key
+   * @param data - Data to be written into the container.
+   * @param traceID - Trace ID for logging purposes.
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void writeSmallFile(XceiverClientSpi client,
+      String containerName, String key, byte[] data, String traceID)
+      throws IOException {
+
+    KeyData containerKeyData =
+        KeyData.newBuilder().setContainerName(containerName).setName(key)
+            .build();
+    PutKeyRequestProto.Builder createKeyRequest =
+        PutKeyRequestProto.newBuilder()
+            .setPipeline(client.getPipeline().getProtobufMessage())
+            .setKeyData(containerKeyData);
+
+    KeyValue keyValue =
+        KeyValue.newBuilder().setKey("OverWriteRequested").setValue("true")
+            .build();
+    ChunkInfo chunk =
+        ChunkInfo.newBuilder().setChunkName(key + "_chunk").setOffset(0)
+            .setLen(data.length).addMetadata(keyValue).build();
+
+    PutSmallFileRequestProto putSmallFileRequest =
+        PutSmallFileRequestProto.newBuilder().setChunkInfo(chunk)
+            .setKey(createKeyRequest).setData(ByteString.copyFrom(data))
+            .build();
+
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request =
+        ContainerCommandRequestProto.newBuilder()
+            .setCmdType(Type.PutSmallFile)
+            .setTraceID(traceID)
+            .setDatanodeUuid(id)
+            .setPutSmallFile(putSmallFileRequest)
+            .build();
+    ContainerCommandResponseProto response = client.sendCommand(request);
+    validateContainerResponse(response);
+  }
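+
+  /*
+   * Usage sketch for the small-file path, assuming a connected
+   * XceiverClientSpi named "client" (names are illustrative only):
+   *
+   *   byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
+   *   ContainerProtocolCalls.writeSmallFile(client, containerName, "key1",
+   *       payload, traceID);
+   *
+   * The call combines the PutKey and WriteChunk steps into a single
+   * PutSmallFile RPC.
+   */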
+
+  /**
+   * createContainer call that creates a container on the datanode.
+   * @param client - client that communicates with the container
+   * @param traceID - trace ID for logging purposes
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void createContainer(XceiverClientSpi client, String traceID)
+      throws IOException {
+    ContainerProtos.CreateContainerRequestProto.Builder createRequest =
+        ContainerProtos.CreateContainerRequestProto
+            .newBuilder();
+    ContainerProtos.ContainerData.Builder containerData = ContainerProtos
+        .ContainerData.newBuilder();
+    containerData.setName(client.getPipeline().getContainerName());
+    createRequest.setPipeline(client.getPipeline().getProtobufMessage());
+    createRequest.setContainerData(containerData.build());
+
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.CreateContainer);
+    request.setCreateContainer(createRequest);
+    request.setDatanodeUuid(id);
+    request.setTraceID(traceID);
+    ContainerCommandResponseProto response = client.sendCommand(
+        request.build());
+    validateContainerResponse(response);
+  }
+
+  /**
+   * Deletes a container from a pipeline.
+   *
+   * @param client - client that communicates with the container
+   * @param force whether or not to forcibly delete the container
+   * @param traceID - trace ID for logging purposes
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void deleteContainer(XceiverClientSpi client,
+      boolean force, String traceID) throws IOException {
+    ContainerProtos.DeleteContainerRequestProto.Builder deleteRequest =
+        ContainerProtos.DeleteContainerRequestProto.newBuilder();
+    deleteRequest.setName(client.getPipeline().getContainerName());
+    deleteRequest.setPipeline(client.getPipeline().getProtobufMessage());
+    deleteRequest.setForceDelete(force);
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.DeleteContainer);
+    request.setDeleteContainer(deleteRequest);
+    request.setTraceID(traceID);
+    request.setDatanodeUuid(id);
+    ContainerCommandResponseProto response =
+        client.sendCommand(request.build());
+    validateContainerResponse(response);
+  }
+
+  /**
+   * Close a container.
+   *
+   * @param client - client that communicates with the container
+   * @param traceID - trace ID for logging purposes
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static void closeContainer(XceiverClientSpi client, String traceID)
+      throws IOException {
+    ContainerProtos.CloseContainerRequestProto.Builder closeRequest =
+        ContainerProtos.CloseContainerRequestProto.newBuilder();
+    closeRequest.setPipeline(client.getPipeline().getProtobufMessage());
+
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(Type.CloseContainer);
+    request.setCloseContainer(closeRequest);
+    request.setTraceID(traceID);
+    request.setDatanodeUuid(id);
+    ContainerCommandResponseProto response =
+        client.sendCommand(request.build());
+    validateContainerResponse(response);
+  }
+
+  /**
+   * readContainer call that gets meta data from an existing container.
+   *
+   * @param client - client that communicates with the container
+   * @param containerName - name of the container to read
+   * @param traceID - trace ID for logging purposes
+   * @return container protocol read container response
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static ReadContainerResponseProto readContainer(
+      XceiverClientSpi client, String containerName,
+      String traceID) throws IOException {
+    ReadContainerRequestProto.Builder readRequest =
+        ReadContainerRequestProto.newBuilder();
+    readRequest.setName(containerName);
+    readRequest.setPipeline(client.getPipeline().getProtobufMessage());
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(Type.ReadContainer);
+    request.setReadContainer(readRequest);
+    request.setDatanodeUuid(id);
+    request.setTraceID(traceID);
+    ContainerCommandResponseProto response =
+        client.sendCommand(request.build());
+    validateContainerResponse(response);
+    return response.getReadContainer();
+  }
+
+  /**
+   * Reads the data given the container name and key.
+   *
+   * @param client - client that communicates with the container
+   * @param containerName - name of the container
+   * @param key - name of the key to read
+   * @param traceID - trace ID for logging purposes
+   * @return GetSmallFileResponseProto
+   * @throws IOException if there is an I/O error while performing the call
+   */
+  public static GetSmallFileResponseProto readSmallFile(XceiverClientSpi client,
+      String containerName, String key, String traceID) throws IOException {
+    KeyData containerKeyData = KeyData
+        .newBuilder()
+        .setContainerName(containerName)
+        .setName(key).build();
+
+    GetKeyRequestProto.Builder getKey = GetKeyRequestProto
+        .newBuilder()
+        .setPipeline(client.getPipeline().getProtobufMessage())
+        .setKeyData(containerKeyData);
+    ContainerProtos.GetSmallFileRequestProto getSmallFileRequest =
+        GetSmallFileRequestProto
+            .newBuilder().setKey(getKey)
+            .build();
+    String id = client.getPipeline().getLeader().getUuidString();
+    ContainerCommandRequestProto request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(Type.GetSmallFile)
+        .setTraceID(traceID)
+        .setDatanodeUuid(id)
+        .setGetSmallFile(getSmallFileRequest)
+        .build();
+    ContainerCommandResponseProto response = client.sendCommand(request);
+    validateContainerResponse(response);
+    return response.getGetSmallFile();
+  }
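+
+  /*
+   * Reading the small file back is symmetric (illustrative names only):
+   *
+   *   GetSmallFileResponseProto smallFile = ContainerProtocolCalls
+   *       .readSmallFile(client, containerName, "key1", traceID);
+   *
+   * The response carries the chunk data that was stored for the key.
+   */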
+
+  /**
+   * Validates a response from a container protocol call.  Any non-successful
+   * return code is mapped to a corresponding exception and thrown.
+   *
+   * @param response container protocol call response
+   * @throws IOException if the container protocol call failed
+   */
+  private static void validateContainerResponse(
+      ContainerCommandResponseProto response
+  ) throws StorageContainerException {
+    if (response.getResult() == ContainerProtos.Result.SUCCESS) {
+      return;
+    }
+    throw new StorageContainerException(
+        response.getMessage(), response.getResult());
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
new file mode 100644
index 0000000..8e98158
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.storage;
+
+/**
+ * This package contains StorageContainerManager classes.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java
new file mode 100644
index 0000000..ff0ac4e
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+
+package org.apache.hadoop.ozone;
+
+import java.util.Objects;
+
+/**
+ * The OzoneAcl class defines bucket ACLs used in Ozone.
+ *
+ * ACLs in Ozone follow this pattern:
+ * • user:name:rw
+ * • group:name:rw
+ * • world::rw
+ */
+public class OzoneAcl {
+  private OzoneACLType type;
+  private String name;
+  private OzoneACLRights rights;
+
+  /**
+   * Constructor for OzoneAcl.
+   */
+  public OzoneAcl() {
+  }
+
+  /**
+   * Constructor for OzoneAcl.
+   *
+   * @param type - Type
+   * @param name - Name of user
+   * @param rights - Rights
+   */
+  public OzoneAcl(OzoneACLType type, String name, OzoneACLRights rights) {
+    this.name = name;
+    this.rights = rights;
+    this.type = type;
+    if (type == OzoneACLType.WORLD && name.length() != 0) {
+      throw new IllegalArgumentException("Unexpected name part in world type");
+    }
+    if (((type == OzoneACLType.USER) || (type == OzoneACLType.GROUP))
+        && (name.length() == 0)) {
+      throw new IllegalArgumentException("User or group name is required");
+    }
+  }
+
+  /**
+   * Parses an ACL string and returns the ACL object.
+   *
+   * @param acl - ACL String, e.g. user:anu:rw
+   *
+   * @return - Ozone ACLs
+   */
+  public static OzoneAcl parseAcl(String acl) throws IllegalArgumentException {
+    if ((acl == null) || acl.isEmpty()) {
+      throw new IllegalArgumentException("ACLs cannot be null or empty");
+    }
+    String[] parts = acl.trim().split(":");
+    if (parts.length < 3) {
+      throw new IllegalArgumentException("ACLs are not in expected format");
+    }
+
+    OzoneACLType aclType = OzoneACLType.valueOf(parts[0].toUpperCase());
+    OzoneACLRights rights = OzoneACLRights.getACLRight(parts[2].toLowerCase());
+
+    // TODO : Support sanitation of these user names by calling into
+    // userAuth Interface.
+    return new OzoneAcl(aclType, parts[1], rights);
+  }
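+
+  /*
+   * For example (illustrative values), parseAcl("user:anu:rw") yields an
+   * OzoneAcl with type USER, name "anu" and rights READ_WRITE, and
+   * toString() renders it back as "USER:anu:rw".
+   */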
+
+  @Override
+  public String toString() {
+    return type + ":" + name + ":" + OzoneACLRights.getACLRightsString(rights);
+  }
+
+  /**
+   * Returns a hash code value for the object. This method is
+   * supported for the benefit of hash tables.
+   *
+   * @return a hash code value for this object.
+   *
+   * @see Object#equals(Object)
+   * @see System#identityHashCode
+   */
+  @Override
+  public int hashCode() {
+    return Objects.hash(this.getName(), this.getRights().toString(),
+                        this.getType().toString());
+  }
+
+  /**
+   * Returns name.
+   *
+   * @return name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Returns Rights.
+   *
+   * @return - Rights
+   */
+  public OzoneACLRights getRights() {
+    return rights;
+  }
+
+  /**
+   * Returns Type.
+   *
+   * @return type
+   */
+  public OzoneACLType getType() {
+    return type;
+  }
+
+  /**
+   * Indicates whether some other object is "equal to" this one.
+   *
+   * @param obj the reference object with which to compare.
+   *
+   * @return {@code true} if this object is the same as the obj
+   * argument; {@code false} otherwise.
+   */
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    OzoneAcl otherAcl = (OzoneAcl) obj;
+    return otherAcl.getName().equals(this.getName()) &&
+        otherAcl.getRights() == this.getRights() &&
+        otherAcl.getType() == this.getType();
+  }
+
+  /**
+   * ACL types.
+   */
+  public enum OzoneACLType {
+    USER(OzoneConsts.OZONE_ACL_USER_TYPE),
+    GROUP(OzoneConsts.OZONE_ACL_GROUP_TYPE),
+    WORLD(OzoneConsts.OZONE_ACL_WORLD_TYPE);
+
+    /**
+     * String value for this Enum.
+     */
+    private final String value;
+
+    /**
+     * Init OzoneACLtypes enum.
+     *
+     * @param val String type for this enum.
+     */
+    OzoneACLType(String val) {
+      value = val;
+    }
+  }
+
+  /**
+   * ACL rights.
+   */
+  public enum OzoneACLRights {
+    READ, WRITE, READ_WRITE;
+
+    /**
+     * Returns the ACL rights based on the passed-in String.
+     *
+     * @param type ACL right string
+     *
+     * @return OzoneACLRights
+     */
+    public static OzoneACLRights getACLRight(String type) {
+      if (type == null || type.isEmpty()) {
+        throw new IllegalArgumentException("ACL right cannot be empty");
+      }
+
+      switch (type) {
+      case OzoneConsts.OZONE_ACL_READ:
+        return OzoneACLRights.READ;
+      case OzoneConsts.OZONE_ACL_WRITE:
+        return OzoneACLRights.WRITE;
+      case OzoneConsts.OZONE_ACL_READ_WRITE:
+      case OzoneConsts.OZONE_ACL_WRITE_READ:
+        return OzoneACLRights.READ_WRITE;
+      default:
+        throw new IllegalArgumentException("ACL right is not recognized");
+      }
+
+    }
+
+    /**
+     * Returns String representation of ACL rights.
+     * @param acl OzoneACLRights
+     * @return String representation of acl
+     */
+    public static String getACLRightsString(OzoneACLRights acl) {
+      switch(acl) {
+      case READ:
+        return OzoneConsts.OZONE_ACL_READ;
+      case WRITE:
+        return OzoneConsts.OZONE_ACL_WRITE;
+      case READ_WRITE:
+        return OzoneConsts.OZONE_ACL_READ_WRITE;
+      default:
+        throw new IllegalArgumentException("ACL right is not recognized");
+      }
+    }
+
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
new file mode 100644
index 0000000..72531a2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
@@ -0,0 +1,241 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+
+/**
+ * This class contains constants for configuration keys used in Ozone.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public final class OzoneConfigKeys {
+  public static final String DFS_CONTAINER_IPC_PORT =
+      "dfs.container.ipc";
+  public static final int DFS_CONTAINER_IPC_PORT_DEFAULT = 9859;
+
+  /**
+   * When set to true, allocate a random free port for the ozone container,
+   * so that a mini cluster is able to launch multiple containers on a node.
+   *
+   * When set to false (default), the container port is fixed as specified by
+   * DFS_CONTAINER_IPC_PORT_DEFAULT.
+   */
+  public static final String DFS_CONTAINER_IPC_RANDOM_PORT =
+      "dfs.container.ipc.random.port";
+  public static final boolean DFS_CONTAINER_IPC_RANDOM_PORT_DEFAULT =
+      false;
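+
+  // Example (illustrative): a mini cluster test may enable this with
+  //   conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT, true);
+  // so that several containers can bind to free ports on the same host.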
+
+  /**
+   * Ratis Port where containers listen to.
+   */
+  public static final String DFS_CONTAINER_RATIS_IPC_PORT =
+      "dfs.container.ratis.ipc";
+  public static final int DFS_CONTAINER_RATIS_IPC_PORT_DEFAULT = 9858;
+
+  /**
+   * When set to true, allocate a random free port for ozone container, so that
+   * a mini cluster is able to launch multiple containers on a node.
+   */
+  public static final String DFS_CONTAINER_RATIS_IPC_RANDOM_PORT =
+      "dfs.container.ratis.ipc.random.port";
+  public static final boolean DFS_CONTAINER_RATIS_IPC_RANDOM_PORT_DEFAULT =
+      false;
+
+  public static final String OZONE_LOCALSTORAGE_ROOT =
+      "ozone.localstorage.root";
+  public static final String OZONE_LOCALSTORAGE_ROOT_DEFAULT = "/tmp/ozone";
+  public static final String OZONE_ENABLED =
+      "ozone.enabled";
+  public static final boolean OZONE_ENABLED_DEFAULT = false;
+  public static final String OZONE_HANDLER_TYPE_KEY =
+      "ozone.handler.type";
+  public static final String OZONE_HANDLER_TYPE_DEFAULT = "distributed";
+  public static final String OZONE_TRACE_ENABLED_KEY =
+      "ozone.trace.enabled";
+  public static final boolean OZONE_TRACE_ENABLED_DEFAULT = false;
+
+  public static final String OZONE_METADATA_DIRS =
+      "ozone.metadata.dirs";
+
+  public static final String OZONE_METADATA_STORE_IMPL =
+      "ozone.metastore.impl";
+  public static final String OZONE_METADATA_STORE_IMPL_LEVELDB =
+      "LevelDB";
+  public static final String OZONE_METADATA_STORE_IMPL_ROCKSDB =
+      "RocksDB";
+  public static final String OZONE_METADATA_STORE_IMPL_DEFAULT =
+      OZONE_METADATA_STORE_IMPL_ROCKSDB;
+
+  public static final String OZONE_METADATA_STORE_ROCKSDB_STATISTICS =
+      "ozone.metastore.rocksdb.statistics";
+
+  public static final String  OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT =
+      "ALL";
+  public static final String OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF =
+      "OFF";
+
+  public static final String OZONE_CONTAINER_CACHE_SIZE =
+      "ozone.container.cache.size";
+  public static final int OZONE_CONTAINER_CACHE_DEFAULT = 1024;
+
+  public static final String OZONE_SCM_BLOCK_SIZE_IN_MB =
+      "ozone.scm.block.size.in.mb";
+  public static final long OZONE_SCM_BLOCK_SIZE_DEFAULT = 256;
+
+  /**
+   * Ozone administrator users delimited by a comma.
+   * If not set, only the user who launches an ozone service will be the
+   * admin user. This property must be set if ozone services are started by
+   * different users. Otherwise, the RPC layer will reject calls from
+   * other servers which are started by users not in the list.
+   */
+  public static final String OZONE_ADMINISTRATORS =
+      "ozone.administrators";
+
+  public static final String OZONE_CLIENT_PROTOCOL =
+      "ozone.client.protocol";
+
+  // This defines the overall connection limit for the connection pool used in
+  // RestClient.
+  public static final String OZONE_REST_CLIENT_HTTP_CONNECTION_MAX =
+      "ozone.rest.client.http.connection.max";
+  public static final int OZONE_REST_CLIENT_HTTP_CONNECTION_DEFAULT = 100;
+
+  // This defines the connection limit per one HTTP route/host.
+  public static final String OZONE_REST_CLIENT_HTTP_CONNECTION_PER_ROUTE_MAX =
+      "ozone.rest.client.http.connection.per-route.max";
+
+  public static final int
+      OZONE_REST_CLIENT_HTTP_CONNECTION_PER_ROUTE_MAX_DEFAULT = 20;
+
+  public static final String OZONE_CLIENT_SOCKET_TIMEOUT =
+      "ozone.client.socket.timeout";
+  public static final int OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT = 5000;
+  public static final String OZONE_CLIENT_CONNECTION_TIMEOUT =
+      "ozone.client.connection.timeout";
+  public static final int OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT = 5000;
+
+  public static final String OZONE_REPLICATION = "ozone.replication";
+  public static final int OZONE_REPLICATION_DEFAULT =
+      ReplicationFactor.THREE.getValue();
+
+  public static final String OZONE_REPLICATION_TYPE = "ozone.replication.type";
+  public static final String OZONE_REPLICATION_TYPE_DEFAULT =
+      ReplicationType.RATIS.toString();
+
+  /**
+   * Configuration property to configure the cache size of client list calls.
+   */
+  public static final String OZONE_CLIENT_LIST_CACHE_SIZE =
+      "ozone.client.list.cache";
+  public static final int OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT = 1000;
+
+  /**
+   * Configuration properties for Ozone Block Deleting Service.
+   */
+  public static final String OZONE_BLOCK_DELETING_SERVICE_INTERVAL =
+      "ozone.block.deleting.service.interval";
+  public static final String OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT
+      = "60s";
+
+  /**
+   * The interval of open key clean service.
+   */
+  public static final String OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS =
+      "ozone.open.key.cleanup.service.interval.seconds";
+  public static final int
+      OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS_DEFAULT
+      = 24 * 3600; // a total of 24 hours
+
+  /**
+   * An open key gets cleaned up when it has been in the open state for too
+   * long.
+   */
+  public static final String OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS =
+      "ozone.open.key.expire.threshold";
+  public static final int OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS_DEFAULT =
+      24 * 3600;
+
+  public static final String OZONE_BLOCK_DELETING_SERVICE_TIMEOUT =
+      "ozone.block.deleting.service.timeout";
+  public static final String OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT
+      = "300s"; // 300s for default
+
+  public static final String OZONE_KEY_PREALLOCATION_MAXSIZE =
+      "ozone.key.preallocation.maxsize";
+  public static final long OZONE_KEY_PREALLOCATION_MAXSIZE_DEFAULT
+      = 128 * OzoneConsts.MB;
+
+  public static final String OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER =
+      "ozone.block.deleting.limit.per.task";
+  public static final int OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER_DEFAULT
+      = 1000;
+
+  public static final String OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL
+      = "ozone.block.deleting.container.limit.per.interval";
+  public static final int
+      OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT = 10;
+
+  public static final String OZONE_CONTAINER_REPORT_INTERVAL =
+      "ozone.container.report.interval";
+  public static final String OZONE_CONTAINER_REPORT_INTERVAL_DEFAULT =
+      "60s";
+
+  public static final String DFS_CONTAINER_RATIS_ENABLED_KEY
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY;
+  public static final boolean DFS_CONTAINER_RATIS_ENABLED_DEFAULT
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT;
+  public static final String DFS_CONTAINER_RATIS_RPC_TYPE_KEY
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY;
+  public static final String DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT;
+  public static final String DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_KEY
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_KEY;
+  public static final int DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_DEFAULT
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_DEFAULT;
+  public static final String DFS_CONTAINER_RATIS_SEGMENT_SIZE_KEY
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_SIZE_KEY;
+  public static final int DFS_CONTAINER_RATIS_SEGMENT_SIZE_DEFAULT
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_SIZE_DEFAULT;
+  public static final String DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_KEY
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_KEY;
+  public static final int DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_DEFAULT
+      = ScmConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_DEFAULT;
+  public static final int DFS_CONTAINER_CHUNK_MAX_SIZE
+      = ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE;
+  public static final String DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR =
+      "dfs.container.ratis.datanode.storage.dir";
+
+  public static final String OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL =
+      "ozone.web.authentication.kerberos.principal";
+
+  public static final String HDDS_DATANODE_PLUGINS_KEY =
+      "hdds.datanode.plugins";
+
+  /**
+   * There is no need to instantiate this class.
+   */
+  private OzoneConfigKeys() {
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
new file mode 100644
index 0000000..2f9e469
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Set of constants used in Ozone implementation.
+ */
+@InterfaceAudience.Private
+public final class OzoneConsts {
+
+
+  public static final String STORAGE_DIR = "scm";
+  public static final String SCM_ID = "scmUuid";
+
+  public static final String OZONE_SIMPLE_ROOT_USER = "root";
+  public static final String OZONE_SIMPLE_HDFS_USER = "hdfs";
+
+  /*
+   * These name length limits apply to both bucket and volume names.
+   */
+  public static final int OZONE_MIN_BUCKET_NAME_LENGTH = 3;
+  public static final int OZONE_MAX_BUCKET_NAME_LENGTH = 63;
+
+  public static final String OZONE_ACL_USER_TYPE = "user";
+  public static final String OZONE_ACL_GROUP_TYPE = "group";
+  public static final String OZONE_ACL_WORLD_TYPE = "world";
+
+  public static final String OZONE_ACL_READ = "r";
+  public static final String OZONE_ACL_WRITE = "w";
+  public static final String OZONE_ACL_READ_WRITE = "rw";
+  public static final String OZONE_ACL_WRITE_READ = "wr";
+
+  public static final String OZONE_DATE_FORMAT =
+      "EEE, dd MMM yyyy HH:mm:ss zzz";
+  public static final String OZONE_TIME_ZONE = "GMT";
+
+  public static final String OZONE_COMPONENT = "component";
+  public static final String OZONE_FUNCTION  = "function";
+  public static final String OZONE_RESOURCE = "resource";
+  public static final String OZONE_USER = "user";
+  public static final String OZONE_REQUEST = "request";
+
+  public static final String CONTAINER_EXTENSION = ".container";
+  public static final String CONTAINER_META = ".meta";
+
+  //  container storage is in the following format.
+  //  Data Volume basePath/containers/<containerName>/metadata and
+  //  Data Volume basePath/containers/<containerName>/data/...
+  public static final String CONTAINER_PREFIX  = "containers";
+  public static final String CONTAINER_META_PATH = "metadata";
+  public static final String CONTAINER_DATA_PATH = "data";
+  public static final String CONTAINER_TEMPORARY_CHUNK_PREFIX = "tmp";
+  public static final String CONTAINER_CHUNK_NAME_DELIMITER = ".";
+  public static final String CONTAINER_ROOT_PREFIX = "repository";
+
+  public static final String FILE_HASH = "SHA-256";
+  public static final String CHUNK_OVERWRITE = "OverWriteRequested";
+
+  public static final int CHUNK_SIZE = 1 * 1024 * 1024; // 1 MB
+  public static final long KB = 1024L;
+  public static final long MB = KB * 1024L;
+  public static final long GB = MB * 1024L;
+  public static final long TB = GB * 1024L;
+
+  /**
+   * LevelDB names used by SCM and datanodes.
+   */
+  public static final String CONTAINER_DB_SUFFIX = "container.db";
+  public static final String SCM_CONTAINER_DB = "scm-" + CONTAINER_DB_SUFFIX;
+  public static final String DN_CONTAINER_DB = "-dn-" + CONTAINER_DB_SUFFIX;
+  public static final String BLOCK_DB = "block.db";
+  public static final String NODEPOOL_DB = "nodepool.db";
+  public static final String OPEN_CONTAINERS_DB = "openContainers.db";
+  public static final String DELETED_BLOCK_DB = "deletedBlock.db";
+  public static final String KSM_DB_NAME = "ksm.db";
+
+  /**
+   * Supports Bucket Versioning.
+   */
+  public enum Versioning {NOT_DEFINED, ENABLED, DISABLED}
+
+  /**
+   * Ozone handler types.
+   */
+  public static final String OZONE_HANDLER_DISTRIBUTED = "distributed";
+  public static final String OZONE_HANDLER_LOCAL = "local";
+
+  public static final String DELETING_KEY_PREFIX = "#deleting#";
+  public static final String OPEN_KEY_PREFIX = "#open#";
+  public static final String OPEN_KEY_ID_DELIMINATOR = "#";
+
+  /**
+   * KSM LevelDB prefixes.
+   *
+   * The KSM DB stores metadata as KV pairs with certain prefixes;
+   * the prefix is used to improve the performance of fetching related
+   * metadata.
+   *
+   * KSM DB Schema:
+   *  ----------------------------------------------------------
+   *  |  KEY                                     |     VALUE   |
+   *  ----------------------------------------------------------
+   *  | $userName                                |  VolumeList |
+   *  ----------------------------------------------------------
+   *  | /#volumeName                             |  VolumeInfo |
+   *  ----------------------------------------------------------
+   *  | /#volumeName/#bucketName                 |  BucketInfo |
+   *  ----------------------------------------------------------
+   *  | /volumeName/bucketName/keyName           |  KeyInfo    |
+   *  ----------------------------------------------------------
+   *  | #deleting#/volumeName/bucketName/keyName |  KeyInfo    |
+   *  ----------------------------------------------------------
+   */
+  public static final String KSM_VOLUME_PREFIX = "/#";
+  public static final String KSM_BUCKET_PREFIX = "/#";
+  public static final String KSM_KEY_PREFIX = "/";
+  public static final String KSM_USER_PREFIX = "$";
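+
+  /*
+   * Illustrative example of how these prefixes compose (values made up):
+   * for volume "vol1", bucket "b1" and key "k1", the KSM DB keys look like
+   * "/#vol1", "/#vol1/#b1" and "/vol1/b1/k1", and a key queued for deletion
+   * is stored under "#deleting#/vol1/b1/k1".
+   */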
+
+  /**
+   * Max KSM Quota size of 1024 PB.
+   */
+  public static final long MAX_QUOTA_IN_BYTES = 1024L * 1024 * TB;
+
+  /**
+   * Max number of keys returned per list buckets operation.
+   */
+  public static final int MAX_LISTBUCKETS_SIZE  = 1024;
+
+  /**
+   * Max number of keys returned per list keys operation.
+   */
+  public static final int MAX_LISTKEYS_SIZE  = 1024;
+
+  /**
+   * Max number of volumes returned per list volumes operation.
+   */
+  public static final int MAX_LISTVOLUMES_SIZE = 1024;
+
+  public static final int INVALID_PORT = -1;
+
+
+  // The ServiceListJSONServlet context attribute where KeySpaceManager
+  // instance gets stored.
+  public static final String KSM_CONTEXT_ATTRIBUTE = "ozone.ksm";
+
+  private OzoneConsts() {
+    // Never Constructed
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java
new file mode 100644
index 0000000..38ce6cc
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.common;
+
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .KeyBlocks;
+
+import java.util.List;
+
+/**
+ * A group of related blocks, e.g. blocks that belong to a certain object key.
+ */
+public final class BlockGroup {
+
+  private String groupID;
+  private List<String> blockIDs;
+  private BlockGroup(String groupID, List<String> blockIDs) {
+    this.groupID = groupID;
+    this.blockIDs = blockIDs;
+  }
+
+  public List<String> getBlockIDList() {
+    return blockIDs;
+  }
+
+  public String getGroupID() {
+    return groupID;
+  }
+
+  public KeyBlocks getProto() {
+    return KeyBlocks.newBuilder().setKey(groupID)
+        .addAllBlocks(blockIDs).build();
+  }
+
+  /**
+   * Parses a KeyBlocks proto to a group of blocks.
+   * @param proto KeyBlocks proto.
+   * @return a group of blocks.
+   */
+  public static BlockGroup getFromProto(KeyBlocks proto) {
+    return BlockGroup.newBuilder().setKeyName(proto.getKey())
+        .addAllBlockIDs(proto.getBlocksList()).build();
+  }
+
+  public static Builder newBuilder() {
+    return new Builder();
+  }
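+
+  /*
+   * Builder usage sketch (illustrative names only):
+   *
+   *   BlockGroup group = BlockGroup.newBuilder()
+   *       .setKeyName("vol1/bucket1/key1")
+   *       .addAllBlockIDs(Arrays.asList("block-1", "block-2"))
+   *       .build();
+   *
+   * getProto() then serializes the group into a KeyBlocks message.
+   */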
+
+  /**
+   * BlockGroup instance builder.
+   */
+  public static class Builder {
+
+    private String groupID;
+    private List<String> blockIDs;
+
+    public Builder setKeyName(String blockGroupID) {
+      this.groupID = blockGroupID;
+      return this;
+    }
+
+    public Builder addAllBlockIDs(List<String> keyBlocks) {
+      this.blockIDs = keyBlocks;
+      return this;
+    }
+
+    public BlockGroup build() {
+      return new BlockGroup(groupID, blockIDs);
+    }
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java
new file mode 100644
index 0000000..ec54ac5
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.common;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmBlockResult;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmBlockResult.Result;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Result to delete a group of blocks.
+ */
+public class DeleteBlockGroupResult {
+  private String objectKey;
+  private List<DeleteBlockResult> blockResultList;
+  public DeleteBlockGroupResult(String objectKey,
+      List<DeleteBlockResult> blockResultList) {
+    this.objectKey = objectKey;
+    this.blockResultList = blockResultList;
+  }
+
+  public String getObjectKey() {
+    return objectKey;
+  }
+
+  public List<DeleteBlockResult> getBlockResultList() {
+    return blockResultList;
+  }
+
+  public List<DeleteScmBlockResult> getBlockResultProtoList() {
+    List<DeleteScmBlockResult> resultProtoList =
+        new ArrayList<>(blockResultList.size());
+    for (DeleteBlockResult result : blockResultList) {
+      DeleteScmBlockResult proto = DeleteScmBlockResult.newBuilder()
+          .setKey(result.getKey())
+          .setResult(result.getResult()).build();
+      resultProtoList.add(proto);
+    }
+    return resultProtoList;
+  }
+
+  public static List<DeleteBlockResult> convertBlockResultProto(
+      List<DeleteScmBlockResult> results) {
+    List<DeleteBlockResult> protoResults = new ArrayList<>(results.size());
+    for (DeleteScmBlockResult result : results) {
+      protoResults.add(new DeleteBlockResult(result.getKey(),
+          result.getResult()));
+    }
+    return protoResults;
+  }
+
+  /**
+   * This group is considered to be successfully executed only if all of its
+   * blocks are successfully deleted.
+   *
+   * @return true if all blocks are successfully deleted, false otherwise.
+   */
+  public boolean isSuccess() {
+    for (DeleteBlockResult result : blockResultList) {
+      if (result.getResult() != Result.success) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * @return a list of block IDs whose deletion failed.
+   */
+  public List<String> getFailedBlocks() {
+    List<String> failedBlocks = blockResultList.stream()
+        .filter(result -> result.getResult() != Result.success)
+        .map(DeleteBlockResult::getKey).collect(Collectors.toList());
+    return failedBlocks;
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java
new file mode 100644
index 0000000..518b519
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java
@@ -0,0 +1,51 @@
+/**
+  * Licensed to the Apache Software Foundation (ASF) under one
+  * or more contributor license agreements.  See the NOTICE file
+  * distributed with this work for additional information
+  * regarding copyright ownership.  The ASF licenses this file
+  * to you under the Apache License, Version 2.0 (the
+  * "License"); you may not use this file except in compliance
+  * with the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.hadoop.ozone.common;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * This exception is thrown when the file system state is inconsistent
+ * and not recoverable.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class InconsistentStorageStateException extends IOException {
+  private static final long serialVersionUID = 1L;
+
+  public InconsistentStorageStateException(String descr) {
+    super(descr);
+  }
+
+  public InconsistentStorageStateException(File dir, String descr) {
+    super("Directory " + getFilePath(dir) + " is in an inconsistent state: "
+        + descr);
+  }
+
+  private static String getFilePath(File dir) {
+    try {
+      return dir.getCanonicalPath();
+    } catch (IOException e) {
+      // Ignore and fall back to the non-canonical path below.
+    }
+    return dir.getPath();
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
new file mode 100644
index 0000000..fb30d92
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.common;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Properties;
+
+/**
+ * Storage information file. This class defines the methods to check
+ * the consistency of the storage dir and the version file.
+ * <p>
+ * Local storage information is stored in a separate file VERSION.
+ * It contains the type of the node,
+ * the storage layout version, the SCM id, and
+ * the KSM/SCM state creation time.
+ *
+ */
+@InterfaceAudience.Private
+public abstract class Storage {
+  private static final Logger LOG = LoggerFactory.getLogger(Storage.class);
+
+  protected static final String STORAGE_DIR_CURRENT = "current";
+  protected static final String STORAGE_FILE_VERSION = "VERSION";
+
+  private final NodeType nodeType;
+  private final File root;
+  private final File storageDir;
+
+  private StorageState state;
+  private StorageInfo storageInfo;
+
+
+  /**
+   * Determines the state of the Version file.
+   */
+  public enum StorageState {
+    NON_EXISTENT, NOT_INITIALIZED, INITIALIZED
+  }
+
+  public Storage(NodeType type, File root, String sdName)
+      throws IOException {
+    this.nodeType = type;
+    this.root = root;
+    this.storageDir = new File(root, sdName);
+    this.state = getStorageState();
+    if (state == StorageState.INITIALIZED) {
+      this.storageInfo = new StorageInfo(type, getVersionFile());
+    } else {
+      this.storageInfo = new StorageInfo(
+          nodeType, StorageInfo.newClusterID(), Time.now());
+      setNodeProperties();
+    }
+  }
+
+  /**
+   * Gets the path of the Storage dir.
+   * @return Storage dir path
+   */
+  public String getStorageDir() {
+    return storageDir.getAbsoluteFile().toString();
+  }
+
+  /**
+   * Gets the state of the version file.
+   * @return the state of the Version file
+   */
+  public StorageState getState() {
+    return state;
+  }
+
+  public NodeType getNodeType() {
+    return storageInfo.getNodeType();
+  }
+
+  public String getClusterID() {
+    return storageInfo.getClusterID();
+  }
+
+  public long getCreationTime() {
+    return storageInfo.getCreationTime();
+  }
+
+  public void setClusterId(String clusterId) throws IOException {
+    if (state == StorageState.INITIALIZED) {
+      throw new IOException(
+          "Storage directory " + storageDir + " already initialized.");
+    } else {
+      storageInfo.setClusterId(clusterId);
+    }
+  }
+
+  /**
+   * Retrieves the storageInfo instance to read/write the common
+   * version file properties.
+   * @return the instance of the storageInfo class
+   */
+  protected StorageInfo getStorageInfo() {
+    return storageInfo;
+  }
+
+  protected abstract Properties getNodeProperties();
+
+  /**
+   * Sets the node properties specific to KSM/SCM.
+   */
+  private void setNodeProperties() {
+    Properties nodeProperties = getNodeProperties();
+    if (nodeProperties != null) {
+      for (String key : nodeProperties.stringPropertyNames()) {
+        storageInfo.setProperty(key, nodeProperties.getProperty(key));
+      }
+    }
+  }
+
+  /**
+   * Directory {@code current} contains latest files defining
+   * the file system meta-data.
+   *
+   * @return the directory path
+   */
+  private File getCurrentDir() {
+    return new File(storageDir, STORAGE_DIR_CURRENT);
+  }
+
+  /**
+   * File {@code VERSION} contains the following fields:
+   * <ol>
+   * <li>node type</li>
+   * <li>KSM/SCM state creation time</li>
+   * <li>other fields specific for this node type</li>
+   * </ol>
+   * The version file is always written last during storage directory updates.
+   * The existence of the version file indicates that all other files have
+   * been successfully written in the storage directory, the storage is valid
+   * and does not need to be recovered.
+   *
+   * @return the version file path
+   */
+  private File getVersionFile() {
+    return new File(getCurrentDir(), STORAGE_FILE_VERSION);
+  }
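+
+  /*
+   * Illustrative VERSION file contents (values are examples only):
+   *
+   *   nodeType=SCM
+   *   clusterID=CID-ce0da1a0-0000-0000-0000-000000000000
+   *   cTime=1514764800000
+   */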
+
+
+  /**
+   * Check to see if the current/ directory is empty. This method is used
+   * before deciding whether to format the directory.
+   * @throws IOException if unable to list files under the directory.
+   */
+  private void checkEmptyCurrent() throws IOException {
+    File currentDir = getCurrentDir();
+    if (!currentDir.exists()) {
+      // if current/ does not exist, it's safe to format it.
+      return;
+    }
+    try (DirectoryStream<Path> dirStream = Files
+        .newDirectoryStream(currentDir.toPath())) {
+      if (dirStream.iterator().hasNext()) {
+        throw new InconsistentStorageStateException(getCurrentDir(),
+            "Can't initialize the storage directory because the current "
+                + "it is not empty.");
+      }
+    }
+  }
+
+  /**
+   * Check consistency of the storage directory.
+   *
+   * @return state {@link StorageState} of the storage directory
+   * @throws IOException
+   */
+  private StorageState getStorageState() throws IOException {
+    assert root != null : "root is null";
+    String rootPath = root.getCanonicalPath();
+    try { // check that storage exists
+      if (!root.exists()) {
+        // storage directory does not exist
+        LOG.warn("Storage directory " + rootPath + " does not exist");
+        return StorageState.NON_EXISTENT;
+      }
+      // or is inaccessible
+      if (!root.isDirectory()) {
+        LOG.warn(rootPath + " is not a directory");
+        return StorageState.NON_EXISTENT;
+      }
+      if (!FileUtil.canWrite(root)) {
+        LOG.warn("Cannot access storage directory " + rootPath);
+        return StorageState.NON_EXISTENT;
+      }
+    } catch (SecurityException ex) {
+      LOG.warn("Cannot access storage directory " + rootPath, ex);
+      return StorageState.NON_EXISTENT;
+    }
+
+    // check whether current directory is valid
+    File versionFile = getVersionFile();
+    boolean hasCurrent = versionFile.exists();
+
+    if (hasCurrent) {
+      return StorageState.INITIALIZED;
+    } else {
+      checkEmptyCurrent();
+      return StorageState.NOT_INITIALIZED;
+    }
+  }
+
+  /**
+   * Creates the Version file if not present,
+   * otherwise throws an IOException.
+   * @throws IOException if the storage is already initialized or the
+   *         version file cannot be written
+   */
+  public void initialize() throws IOException {
+    if (state == StorageState.INITIALIZED) {
+      throw new IOException("Storage directory already initialized.");
+    }
+    if (!getCurrentDir().mkdirs()) {
+      throw new IOException("Cannot create directory " + getCurrentDir());
+    }
+    storageInfo.writeTo(getVersionFile());
+  }
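+
+  /*
+   * Typical lifecycle sketch for a concrete subclass (illustrative):
+   *
+   *   if (storage.getState() != StorageState.INITIALIZED) {
+   *     storage.setClusterId(clusterId); // optional, before initialize()
+   *     storage.initialize();            // creates current/ and VERSION
+   *   }
+   */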
+
+}
+
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
new file mode 100644
index 0000000..0e98a4c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.common;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.Properties;
+import java.util.UUID;
+
+/**
+ * Common class for storage information. This class defines the common
+ * properties and functions to set them, write them into the version file
+ * and read them from the version file.
+ *
+ */
+@InterfaceAudience.Private
+public class StorageInfo {
+
+  private Properties properties = new Properties();
+
+  /**
+   * Property to hold node type.
+   */
+  private static final String NODE_TYPE = "nodeType";
+  /**
+   * Property to hold ID of the cluster.
+   */
+  private static final String CLUSTER_ID = "clusterID";
+  /**
+   * Property to hold creation time of the storage.
+   */
+  private static final String CREATION_TIME = "cTime";
+
+  /**
+   * Constructs StorageInfo instance.
+   * @param type
+   *          Type of the node using the storage
+   * @param cid
+   *          Cluster ID
+   * @param cT
+   *          Cluster creation time
+   *
+   * @throws IOException
+   */
+  public StorageInfo(NodeType type, String cid, long cT)
+      throws IOException {
+    Preconditions.checkNotNull(type);
+    Preconditions.checkNotNull(cid);
+    Preconditions.checkNotNull(cT);
+    properties.setProperty(NODE_TYPE, type.name());
+    properties.setProperty(CLUSTER_ID, cid);
+    properties.setProperty(CREATION_TIME, String.valueOf(cT));
+  }
+
+  public StorageInfo(NodeType type, File propertiesFile)
+      throws IOException {
+    this.properties = readFrom(propertiesFile);
+    verifyNodeType(type);
+    verifyClusterId();
+    verifyCreationTime();
+  }
+
+  public NodeType getNodeType() {
+    return NodeType.valueOf(properties.getProperty(NODE_TYPE));
+  }
+
+  public String getClusterID() {
+    return properties.getProperty(CLUSTER_ID);
+  }
+
+  public Long getCreationTime() {
+    String creationTime = properties.getProperty(CREATION_TIME);
+    if(creationTime != null) {
+      return Long.parseLong(creationTime);
+    }
+    return null;
+  }
+
+  public String getProperty(String key) {
+    return properties.getProperty(key);
+  }
+
+  public void setProperty(String key, String value) {
+    properties.setProperty(key, value);
+  }
+
+  public void setClusterId(String clusterId) {
+    properties.setProperty(CLUSTER_ID, clusterId);
+  }
+
+  private void verifyNodeType(NodeType type)
+      throws InconsistentStorageStateException {
+    NodeType nodeType = getNodeType();
+    Preconditions.checkNotNull(nodeType);
+    if(type != nodeType) {
+      throw new InconsistentStorageStateException("Expected NodeType: " + type +
+          ", but found: " + nodeType);
+    }
+  }
+
+  private void verifyClusterId()
+      throws InconsistentStorageStateException {
+    String clusterId = getClusterID();
+    Preconditions.checkNotNull(clusterId);
+    if(clusterId.isEmpty()) {
+      throw new InconsistentStorageStateException("Cluster ID not found");
+    }
+  }
+
+  private void verifyCreationTime() {
+    Long creationTime = getCreationTime();
+    Preconditions.checkNotNull(creationTime);
+  }
+
+
+  public void writeTo(File to)
+      throws IOException {
+    try (RandomAccessFile file = new RandomAccessFile(to, "rws");
+         FileOutputStream out = new FileOutputStream(file.getFD())) {
+      file.seek(0);
+    /*
+     * If server is interrupted before this line,
+     * the version file will remain unchanged.
+     */
+      properties.store(out, null);
+    /*
+     * Now the new fields are flushed to the head of the file, but the file
+     * length can still be larger than required, so the end of the file may
+     * contain whole or corrupted fields from its old contents.
+     * If the server is interrupted here and restarted later, these extra
+     * fields either should not affect server behavior or should be handled
+     * by the server correctly.
+     */
+      file.setLength(out.getChannel().position());
+    }
+  }
+
+  private Properties readFrom(File from) throws IOException {
+    try (RandomAccessFile file = new RandomAccessFile(from, "rws");
+        FileInputStream in = new FileInputStream(file.getFD())) {
+      Properties props = new Properties();
+      file.seek(0);
+      props.load(in);
+      return props;
+    }
+  }
+
+  /**
+   * Generate new clusterID.
+   *
+   * clusterID is a persistent attribute of the cluster.
+   * It is generated when the cluster is created and remains the same
+   * during the life cycle of the cluster.  When a new SCM node is initialized,
+   * if this is a new cluster, a new clusterID is generated and stored.
+   * @return new clusterID
+   */
+  public static String newClusterID() {
+    return "CID-" + UUID.randomUUID().toString();
+  }
+
+}
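A minimal usage sketch of the StorageInfo round trip (the version-file path, the NodeType.SCM constant and the use of org.apache.hadoop.util.Time are illustrative assumptions; IOException handling is omitted):

    File versionFile = new File("/var/lib/hadoop/scm/current/VERSION");
    StorageInfo info = new StorageInfo(NodeType.SCM,
        StorageInfo.newClusterID(), Time.now());
    info.writeTo(versionFile);       // persists nodeType, clusterID and cTime

    // On restart the same file is read back and verified against the type.
    StorageInfo loaded = new StorageInfo(NodeType.SCM, versionFile);
    String clusterId = loaded.getClusterID();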
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java
new file mode 100644
index 0000000..6517e58
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.common;
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java
new file mode 100644
index 0000000..9aeff24
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.common.statemachine;
+
+/**
+ * Exception thrown when an invalid state transition is attempted.
+ */
+public class InvalidStateTransitionException extends Exception {
+  private Enum<?> currentState;
+  private Enum<?> event;
+
+  public InvalidStateTransitionException(Enum<?> currentState, Enum<?> event) {
+    super("Invalid event: " + event + " at " + currentState + " state.");
+    this.currentState = currentState;
+    this.event = event;
+  }
+
+  public Enum<?> getCurrentState() {
+    return currentState;
+  }
+
+  public Enum<?> getEvent() {
+    return event;
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java
new file mode 100644
index 0000000..bf8cbd5
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.common.statemachine;
+
+import com.google.common.base.Supplier;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Template class that implements a simple event-driven state machine.
+ * @param <STATE> states allowed
+ * @param <EVENT> events allowed
+ */
+public class StateMachine<STATE extends Enum<?>, EVENT extends Enum<?>> {
+  private STATE initialState;
+  private Set<STATE> finalStates;
+
+  private final LoadingCache<EVENT, Map<STATE, STATE>> transitions =
+      CacheBuilder.newBuilder().build(
+          CacheLoader.from(
+              (Supplier<Map<STATE, STATE>>) () -> new HashMap<>()));
+
+  public StateMachine(STATE initState, Set<STATE> finalStates) {
+    this.initialState = initState;
+    this.finalStates = finalStates;
+  }
+
+  public STATE getInitialState() {
+    return initialState;
+  }
+
+  public Set<STATE> getFinalStates() {
+    return finalStates;
+  }
+
+  public STATE getNextState(STATE from, EVENT e)
+      throws InvalidStateTransitionException {
+    STATE target = transitions.getUnchecked(e).get(from);
+    if (target == null) {
+      throw new InvalidStateTransitionException(from, e);
+    }
+    return target;
+  }
+
+  public void addTransition(STATE from, STATE to, EVENT e) {
+    transitions.getUnchecked(e).put(from, to);
+  }
+}
\ No newline at end of file
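For illustration, a minimal sketch of driving the state machine (the State and Event enums are assumptions declared at class level; java.util.EnumSet is assumed imported; InvalidStateTransitionException handling is omitted):

    enum State { INIT, RUNNING, CLOSED }
    enum Event { START, CLOSE }

    StateMachine<State, Event> machine =
        new StateMachine<>(State.INIT, EnumSet.of(State.CLOSED));
    machine.addTransition(State.INIT, State.RUNNING, Event.START);
    machine.addTransition(State.RUNNING, State.CLOSED, Event.CLOSE);

    State next = machine.getNextState(State.INIT, Event.START);   // RUNNING
    // machine.getNextState(State.CLOSED, Event.START) would throw
    // InvalidStateTransitionException, since no such transition is registered.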
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java
new file mode 100644
index 0000000..045409e3e
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.common.statemachine;
+/**
+ * State machine template classes for Ozone.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java
new file mode 100644
index 0000000..aa1fe74
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkInfo.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Java class that represents the ChunkInfo protobuf message. This helper
+ * class allows us to convert between the protobuf and plain Java forms.
+ */
+public class ChunkInfo {
+  private final String chunkName;
+  private final long offset;
+  private final long len;
+  private String checksum;
+  private final Map<String, String> metadata;
+
+
+  /**
+   * Constructs a ChunkInfo.
+   *
+   * @param chunkName - File Name where chunk lives.
+   * @param offset    - offset where Chunk Starts.
+   * @param len       - Length of the Chunk.
+   */
+  public ChunkInfo(String chunkName, long offset, long len) {
+    this.chunkName = chunkName;
+    this.offset = offset;
+    this.len = len;
+    this.metadata = new TreeMap<>();
+  }
+
+  /**
+   * Adds metadata.
+   *
+   * @param key   - Key Name.
+   * @param value - Value.
+   * @throws IOException
+   */
+  public void addMetadata(String key, String value) throws IOException {
+    synchronized (this.metadata) {
+      if (this.metadata.containsKey(key)) {
+        throw new IOException("This key already exists. Key " + key);
+      }
+      metadata.put(key, value);
+    }
+  }
+
+  /**
+   * Gets a ChunkInfo object from the protobuf definitions.
+   *
+   * @param info - Protobuf class
+   * @return ChunkInfo
+   * @throws IOException
+   */
+  public static ChunkInfo getFromProtoBuf(ContainerProtos.ChunkInfo info)
+      throws IOException {
+    Preconditions.checkNotNull(info);
+
+    ChunkInfo chunkInfo = new ChunkInfo(info.getChunkName(), info.getOffset(),
+        info.getLen());
+
+    for (int x = 0; x < info.getMetadataCount(); x++) {
+      chunkInfo.addMetadata(info.getMetadata(x).getKey(),
+          info.getMetadata(x).getValue());
+    }
+
+
+    if (info.hasChecksum()) {
+      chunkInfo.setChecksum(info.getChecksum());
+    }
+    return chunkInfo;
+  }
+
+  /**
+   * Returns a ProtoBuf Message from ChunkInfo.
+   *
+   * @return Protocol Buffer Message
+   */
+  public ContainerProtos.ChunkInfo getProtoBufMessage() {
+    ContainerProtos.ChunkInfo.Builder builder = ContainerProtos
+        .ChunkInfo.newBuilder();
+
+    builder.setChunkName(this.getChunkName());
+    builder.setOffset(this.getOffset());
+    builder.setLen(this.getLen());
+    if (this.getChecksum() != null && !this.getChecksum().isEmpty()) {
+      builder.setChecksum(this.getChecksum());
+    }
+
+    for (Map.Entry<String, String> entry : metadata.entrySet()) {
+      HddsProtos.KeyValue.Builder keyValBuilder =
+          HddsProtos.KeyValue.newBuilder();
+      builder.addMetadata(keyValBuilder.setKey(entry.getKey())
+          .setValue(entry.getValue()).build());
+    }
+
+    return builder.build();
+  }
+
+  /**
+   * Returns the chunkName.
+   *
+   * @return - String
+   */
+  public String getChunkName() {
+    return chunkName;
+  }
+
+  /**
+   * Gets the start offset of the given chunk in the physical file.
+   *
+   * @return - long
+   */
+  public long getOffset() {
+    return offset;
+  }
+
+  /**
+   * Returns the length of the Chunk.
+   *
+   * @return long
+   */
+  public long getLen() {
+    return len;
+  }
+
+  /**
+   * Returns the SHA256 value of this chunk.
+   *
+   * @return - Hash String
+   */
+  public String getChecksum() {
+    return checksum;
+  }
+
+  /**
+   * Sets the Hash value of this chunk.
+   *
+   * @param checksum - Hash String.
+   */
+  public void setChecksum(String checksum) {
+    this.checksum = checksum;
+  }
+
+  /**
+   * Returns Metadata associated with this Chunk.
+   *
+   * @return - Map of key, value pairs.
+   */
+  public Map<String, String> getMetadata() {
+    return metadata;
+  }
+
+  @Override
+  public String toString() {
+    return "ChunkInfo{" +
+        "chunkName='" + chunkName +
+        ", offset=" + offset +
+        ", len=" + len +
+        '}';
+  }
+}
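A short round-trip sketch between the helper and its protobuf form (chunk name, offset, length and checksum value are made-up illustrations; IOException handling is omitted):

    ChunkInfo chunk = new ChunkInfo("key1_chunk_0", 0, 1024);
    chunk.addMetadata("volume", "vol1");   // throws IOException on duplicates
    chunk.setChecksum("3a7bd3e2360a3d29eea436fcfb7e44c7");

    ContainerProtos.ChunkInfo proto = chunk.getProtoBufMessage();
    ChunkInfo restored = ChunkInfo.getFromProtoBuf(proto);   // same name/offset/len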
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
new file mode 100644
index 0000000..be546c75
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Helper class to convert Protobuf to Java classes.
+ */
+public class KeyData {
+  private final String containerName;
+  private final String keyName;
+  private final Map<String, String> metadata;
+
+  /**
+   * Please note: when we are working with keys, we don't care what they point
+   * to. So we don't read or validate chunk info; that is the responsibility
+   * of higher layers like Ozone. We just read and write data over the network.
+   */
+  private List<ContainerProtos.ChunkInfo> chunks;
+
+  /**
+   * Constructs a KeyData object.
+   *
+   * @param containerName - Name of the container this key belongs to.
+   * @param keyName - Name of the key.
+   */
+  public KeyData(String containerName, String keyName) {
+    this.containerName = containerName;
+    this.keyName = keyName;
+    this.metadata = new TreeMap<>();
+  }
+
+  /**
+   * Returns a KeyData object from the protobuf data.
+   *
+   * @param data - Protobuf data.
+   * @return - KeyData
+   * @throws IOException
+   */
+  public static KeyData getFromProtoBuf(ContainerProtos.KeyData data) throws
+      IOException {
+    KeyData keyData = new KeyData(data.getContainerName(), data.getName());
+    for (int x = 0; x < data.getMetadataCount(); x++) {
+      keyData.addMetadata(data.getMetadata(x).getKey(),
+          data.getMetadata(x).getValue());
+    }
+    keyData.setChunks(data.getChunksList());
+    return keyData;
+  }
+
+  /**
+   * Returns a Protobuf message from KeyData.
+   * @return Proto Buf Message.
+   */
+  public ContainerProtos.KeyData getProtoBufMessage() {
+    ContainerProtos.KeyData.Builder builder =
+        ContainerProtos.KeyData.newBuilder();
+    builder.setContainerName(this.containerName);
+    builder.setName(this.getKeyName());
+    builder.addAllChunks(this.chunks);
+    for (Map.Entry<String, String> entry : metadata.entrySet()) {
+      HddsProtos.KeyValue.Builder keyValBuilder =
+          HddsProtos.KeyValue.newBuilder();
+      builder.addMetadata(keyValBuilder.setKey(entry.getKey())
+          .setValue(entry.getValue()).build());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Adds metadata.
+   *
+   * @param key   - Key
+   * @param value - Value
+   * @throws IOException
+   */
+  public synchronized void addMetadata(String key, String value) throws
+      IOException {
+    if (this.metadata.containsKey(key)) {
+      throw new IOException("This key already exists. Key " + key);
+    }
+    metadata.put(key, value);
+  }
+
+  public synchronized Map<String, String> getMetadata() {
+    return Collections.unmodifiableMap(this.metadata);
+  }
+
+  /**
+   * Returns value of a key.
+   */
+  public synchronized String getValue(String key) {
+    return metadata.get(key);
+  }
+
+  /**
+   * Deletes a metadata entry from the map.
+   *
+   * @param key - Key
+   */
+  public synchronized void deleteKey(String key) {
+    metadata.remove(key);
+  }
+
+  /**
+   * Returns chunks list.
+   *
+   * @return list of chunkinfo.
+   */
+  public List<ContainerProtos.ChunkInfo> getChunks() {
+    return chunks;
+  }
+
+  /**
+   * Returns container Name.
+   * @return String.
+   */
+  public String getContainerName() {
+    return containerName;
+  }
+
+  /**
+   * Returns KeyName.
+   * @return String.
+   */
+  public String getKeyName() {
+    return keyName;
+  }
+
+  /**
+   * Sets Chunk list.
+   *
+   * @param chunks - List of chunks.
+   */
+  public void setChunks(List<ContainerProtos.ChunkInfo> chunks) {
+    this.chunks = chunks;
+  }
+
+  /**
+   * Get the total size of chunks allocated for the key.
+   * @return total size of the key.
+   */
+  public long getSize() {
+    return chunks.parallelStream().mapToLong(e -> e.getLen()).sum();
+  }
+
+}
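A similar sketch for KeyData, reusing ChunkInfo from above (container and key names are illustrative; java.util.Arrays is assumed imported; IOException handling is omitted):

    KeyData keyData = new KeyData("container1", "key1");
    keyData.addMetadata("owner", "ozone");
    keyData.setChunks(Arrays.asList(
        new ChunkInfo("key1_chunk_0", 0, 1024).getProtoBufMessage(),
        new ChunkInfo("key1_chunk_1", 0, 2048).getProtoBufMessage()));

    ContainerProtos.KeyData proto = keyData.getProtoBufMessage();
    long size = KeyData.getFromProtoBuf(proto).getSize();   // 3072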
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java
new file mode 100644
index 0000000..fa5df11
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+/**
+ * Helper classes for the container protocol communication.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java
new file mode 100644
index 0000000..dfa9315
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/Lease.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+import org.apache.hadoop.util.Time;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * This class represents a lease created on a resource. Callbacks can be
+ * registered on the lease; they will be executed in case of timeout.
+ *
+ * @param <T> Resource type for which the lease can be associated
+ */
+public class Lease<T> {
+
+  /**
+   * The resource for which this lease is created.
+   */
+  private final T resource;
+
+  private final long creationTime;
+
+  /**
+   * Lease lifetime in milliseconds.
+   */
+  private volatile long leaseTimeout;
+
+  private boolean expired;
+
+  /**
+   * Functions to be called in case of timeout.
+   */
+  private List<Callable<Void>> callbacks;
+
+
+  /**
+   * Creates a lease on the specified resource with given timeout.
+   *
+   * @param resource
+   *        Resource for which the lease has to be created
+   * @param timeout
+   *        Lease lifetime in milliseconds
+   */
+  public Lease(T resource, long timeout) {
+    this.resource = resource;
+    this.leaseTimeout = timeout;
+    this.callbacks = Collections.synchronizedList(new ArrayList<>());
+    this.creationTime = Time.monotonicNow();
+    this.expired = false;
+  }
+
+  /**
+   * Returns true if the lease has expired, else false.
+   *
+   * @return true if expired, else false
+   */
+  public boolean hasExpired() {
+    return expired;
+  }
+
+  /**
+   * Registers a callback which will be executed in case of timeout. Callbacks
+   * are executed in a separate Thread.
+   *
+   * @param callback
+   *        The Callable which has to be executed
+   * @throws LeaseExpiredException
+   *         If the lease has already timed out
+   */
+  public void registerCallBack(Callable<Void> callback)
+      throws LeaseExpiredException {
+    if(hasExpired()) {
+      throw new LeaseExpiredException("Resource: " + resource);
+    }
+    callbacks.add(callback);
+  }
+
+  /**
+   * Returns the time elapsed since the creation of lease.
+   *
+   * @return elapsed time in milliseconds
+   * @throws LeaseExpiredException
+   *         If the lease has already timed out
+   */
+  public long getElapsedTime() throws LeaseExpiredException {
+    if(hasExpired()) {
+      throw new LeaseExpiredException("Resource: " + resource);
+    }
+    return Time.monotonicNow() - creationTime;
+  }
+
+  /**
+   * Returns the time available before timeout.
+   *
+   * @return remaining time in milliseconds
+   * @throws LeaseExpiredException
+   *         If the lease has already timed out
+   */
+  public long getRemainingTime() throws LeaseExpiredException {
+    if(hasExpired()) {
+      throw new LeaseExpiredException("Resource: " + resource);
+    }
+    return leaseTimeout - getElapsedTime();
+  }
+
+  /**
+   * Returns total lease lifetime.
+   *
+   * @return total lifetime of lease in milliseconds
+   * @throws LeaseExpiredException
+   *         If the lease has already timed out
+   */
+  public long getLeaseLifeTime() throws LeaseExpiredException {
+    if(hasExpired()) {
+      throw new LeaseExpiredException("Resource: " + resource);
+    }
+    return leaseTimeout;
+  }
+
+  /**
+   * Renews the lease timeout period.
+   *
+   * @param timeout
+   *        Time to be added to the lease in milliseconds
+   * @throws LeaseExpiredException
+   *         If the lease has already timed out
+   */
+  public void renew(long timeout) throws LeaseExpiredException {
+    if(hasExpired()) {
+      throw new LeaseExpiredException("Resource: " + resource);
+    }
+    leaseTimeout += timeout;
+  }
+
+  @Override
+  public int hashCode() {
+    return resource.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if(obj instanceof Lease) {
+      return resource.equals(((Lease) obj).resource);
+    }
+    return false;
+  }
+
+  @Override
+  public String toString() {
+    return "Lease<" + resource.toString() + ">";
+  }
+
+  /**
+   * Returns the callbacks to be executed for the lease in case of timeout.
+   *
+   * @return callbacks to be executed
+   */
+  List<Callable<Void>> getCallbacks() {
+    return callbacks;
+  }
+
+  /**
+   * Expires/Invalidates the lease.
+   */
+  void invalidate() {
+    callbacks = null;
+    expired = true;
+  }
+
+}
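A minimal sketch of the Lease API in isolation (the resource string and timeouts are illustrative; LeaseExpiredException handling is omitted; in practice leases are obtained from the LeaseManager further below rather than constructed directly):

    Lease<String> lease = new Lease<>("container-42", 60000);
    lease.registerCallBack(() -> {
      // Invoked if the lease times out before it is released.
      return null;
    });
    long remaining = lease.getRemainingTime();   // <= 60000 ms
    lease.renew(30000);                          // total lifetime is now 90000 ms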
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java
new file mode 100644
index 0000000..a39ea22
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseAlreadyExistException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+/**
+ * This exception represents that there is already a lease acquired on the
+ * same resource.
+ */
+public class LeaseAlreadyExistException extends LeaseException {
+
+  /**
+   * Constructs a {@code LeaseAlreadyExistException} with {@code null}
+   * as its error detail message.
+   */
+  public LeaseAlreadyExistException() {
+    super();
+  }
+
+  /**
+   * Constructs a {@code LeaseAlreadyExistException} with the specified
+   * detail message.
+   *
+   * @param message
+   *        The detail message (which is saved for later retrieval
+   *        by the {@link #getMessage()} method)
+   */
+  public LeaseAlreadyExistException(String message) {
+    super(message);
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java
new file mode 100644
index 0000000..1b7391b
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseCallbackExecutor.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * This class is responsible for executing the callbacks of a lease in case of
+ * timeout.
+ */
+public class LeaseCallbackExecutor<T> implements Runnable {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LeaseCallbackExecutor.class);
+
+  private final T resource;
+  private final List<Callable<Void>> callbacks;
+
+  /**
+   * Constructs LeaseCallbackExecutor instance with list of callbacks.
+   *
+   * @param resource
+   *        The resource for which the callbacks are executed
+   * @param callbacks
+   *        Callbacks to be executed by this executor
+   */
+  public LeaseCallbackExecutor(T resource, List<Callable<Void>> callbacks) {
+    this.resource = resource;
+    this.callbacks = callbacks;
+  }
+
+  @Override
+  public void run() {
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Executing callbacks for lease on {}", resource);
+    }
+    for(Callable<Void> callback : callbacks) {
+      try {
+        callback.call();
+      } catch (Exception e) {
+        LOG.warn("Exception while executing callback for lease on {}",
+            resource, e);
+      }
+    }
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java
new file mode 100644
index 0000000..418f412
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+/**
+ * Base class for all lease-related exceptions.
+ */
+public class LeaseException extends Exception {
+
+  /**
+   * Constructs a {@code LeaseException} with {@code null}
+   * as its error detail message.
+   */
+  public LeaseException() {
+    super();
+  }
+
+  /**
+   * Constructs a {@code LeaseException} with the specified
+   * detail message.
+   *
+   * @param message
+   *        The detail message (which is saved for later retrieval
+   *        by the {@link #getMessage()} method)
+   */
+  public LeaseException(String message) {
+    super(message);
+  }
+
+}
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java
new file mode 100644
index 0000000..440a023
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseExpiredException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+/**
+ * This exception represents that the lease that is being accessed has expired.
+ */
+public class LeaseExpiredException extends LeaseException {
+
+  /**
+   * Constructs a {@code LeaseExpiredException} with {@code null}
+   * as its error detail message.
+   */
+  public LeaseExpiredException() {
+    super();
+  }
+
+  /**
+   * Constructs a {@code LeaseExpiredException} with the specified
+   * detail message.
+   *
+   * @param message
+   *        The detail message (which is saved for later retrieval
+   *        by the {@link #getMessage()} method)
+   */
+  public LeaseExpiredException(String message) {
+    super(message);
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java
new file mode 100644
index 0000000..b8390dd
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManager.java
@@ -0,0 +1,247 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+/**
+ * LeaseManager hands out leases on resources and is responsible for their
+ * lifecycle. A lease can be returned to the LeaseManager before it expires.
+ * The resource for which a lease is created should have a proper
+ * {@code equals} implementation; resource equality is checked when the
+ * lease is created.
+ *
+ * @param <T> Type of leases that this lease manager can create
+ */
+public class LeaseManager<T> {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LeaseManager.class);
+
+  private final long defaultTimeout;
+  private Map<T, Lease<T>> activeLeases;
+  private LeaseMonitor leaseMonitor;
+  private Thread leaseMonitorThread;
+  private boolean isRunning;
+
+  /**
+   * Creates an instance of lease manager.
+   *
+   * @param defaultTimeout
+   *        Default timeout in milliseconds to be used for lease creation.
+   */
+  public LeaseManager(long defaultTimeout) {
+    this.defaultTimeout = defaultTimeout;
+  }
+
+  /**
+   * Starts the lease manager service.
+   */
+  public void start() {
+    LOG.debug("Starting LeaseManager service");
+    activeLeases = new ConcurrentHashMap<>();
+    leaseMonitor = new LeaseMonitor();
+    leaseMonitorThread = new Thread(leaseMonitor);
+    leaseMonitorThread.setName("LeaseManager#LeaseMonitor");
+    leaseMonitorThread.setDaemon(true);
+    leaseMonitorThread.setUncaughtExceptionHandler((thread, throwable) -> {
+      // Restart monitoring on a fresh thread after logging the error;
+      // a terminated thread cannot be started again, and without the
+      // monitor running we cannot handle lease expiry.
+      LOG.error("LeaseMonitor thread encountered an error. Thread: {}",
+          thread.toString(), throwable);
+      leaseMonitorThread = new Thread(leaseMonitor);
+      leaseMonitorThread.setName("LeaseManager#LeaseMonitor");
+      leaseMonitorThread.setDaemon(true);
+      leaseMonitorThread.start();
+    });
+    LOG.debug("Starting LeaseManager#LeaseMonitor Thread");
+    leaseMonitorThread.start();
+    isRunning = true;
+  }
+
+  /**
+   * Returns a lease for the specified resource with default timeout.
+   *
+   * @param resource
+   *        Resource for which lease has to be created
+   * @throws LeaseAlreadyExistException
+   *         If there is already a lease on the resource
+   */
+  public synchronized Lease<T> acquire(T resource)
+      throws LeaseAlreadyExistException {
+    return acquire(resource, defaultTimeout);
+  }
+
+  /**
+   * Returns a lease for the specified resource with the timeout provided.
+   *
+   * @param resource
+   *        Resource for which lease has to be created
+   * @param timeout
+   *        The timeout in milliseconds which has to be set on the lease
+   * @throws LeaseAlreadyExistException
+   *         If there is already a lease on the resource
+   */
+  public synchronized Lease<T> acquire(T resource, long timeout)
+      throws LeaseAlreadyExistException {
+    checkStatus();
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Acquiring lease on {} for {} milliseconds", resource, timeout);
+    }
+    if(activeLeases.containsKey(resource)) {
+      throw new LeaseAlreadyExistException("Resource: " + resource);
+    }
+    Lease<T> lease = new Lease<>(resource, timeout);
+    activeLeases.put(resource, lease);
+    leaseMonitorThread.interrupt();
+    return lease;
+  }
+
+  /**
+   * Returns a lease associated with the specified resource.
+   *
+   * @param resource
+   *        Resource for which the lease has to be returned
+   * @throws LeaseNotFoundException
+   *         If there is no active lease on the resource
+   */
+  public Lease<T> get(T resource) throws LeaseNotFoundException {
+    checkStatus();
+    Lease<T> lease = activeLeases.get(resource);
+    if(lease != null) {
+      return lease;
+    }
+    throw new LeaseNotFoundException("Resource: " + resource);
+  }
+
+  /**
+   * Releases the lease associated with the specified resource.
+   *
+   * @param resource
+   *        The resource for which the lease has to be released
+   * @throws LeaseNotFoundException
+   *         If there is no active lease on the resource
+   */
+  public synchronized void release(T resource)
+      throws LeaseNotFoundException {
+    checkStatus();
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Releasing lease on {}", resource);
+    }
+    Lease<T> lease = activeLeases.remove(resource);
+    if(lease == null) {
+      throw new LeaseNotFoundException("Resource: " + resource);
+    }
+    lease.invalidate();
+  }
+
+  /**
+   * Shuts down the LeaseManager and releases the resources. All the active
+   * {@link Lease} will be released (callbacks on leases will not be
+   * executed).
+   */
+  public void shutdown() {
+    checkStatus();
+    LOG.debug("Shutting down LeaseManager service");
+    leaseMonitor.disable();
+    leaseMonitorThread.interrupt();
+    for(T resource : activeLeases.keySet()) {
+      try {
+        release(resource);
+      }  catch(LeaseNotFoundException ex) {
+        //Ignore the exception, someone might have released the lease
+      }
+    }
+    isRunning = false;
+  }
+
+  /**
+   * Throws {@link LeaseManagerNotRunningException} if the service is not
+   * running.
+   */
+  private void checkStatus() {
+    if(!isRunning) {
+      throw new LeaseManagerNotRunningException("LeaseManager not running.");
+    }
+  }
+
+  /**
+   * Monitors the leases and expires them based on the timeout, also
+   * responsible for executing the callbacks of expired leases.
+   */
+  private final class LeaseMonitor implements Runnable {
+
+    private boolean monitor = true;
+    private ExecutorService executorService;
+
+    private LeaseMonitor() {
+      this.monitor = true;
+      this.executorService = Executors.newCachedThreadPool();
+    }
+
+    @Override
+    public void run() {
+      while(monitor) {
+        LOG.debug("LeaseMonitor: checking for lease expiry");
+        long sleepTime = Long.MAX_VALUE;
+
+        for (T resource : activeLeases.keySet()) {
+          try {
+            Lease<T> lease = get(resource);
+            long remainingTime = lease.getRemainingTime();
+            if (remainingTime <= 0) {
+              //Lease has timed out
+              List<Callable<Void>> leaseCallbacks = lease.getCallbacks();
+              release(resource);
+              executorService.execute(
+                  new LeaseCallbackExecutor(resource, leaseCallbacks));
+            } else {
+              sleepTime = Math.min(sleepTime, remainingTime);
+            }
+          } catch (LeaseNotFoundException | LeaseExpiredException ex) {
+            //Ignore the exception, someone might have released the lease
+          }
+        }
+
+        try {
+          if(!Thread.interrupted()) {
+            Thread.sleep(sleepTime);
+          }
+        } catch (InterruptedException ignored) {
+          // This means a new lease is added to activeLeases.
+        }
+      }
+    }
+
+    /**
+     * Disables lease monitor, next interrupt call on the thread
+     * will stop lease monitor.
+     */
+    public void disable() {
+      monitor = false;
+    }
+  }
+
+}
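A sketch of the full lease lifecycle through the manager (the resource name and timeout are illustrative assumptions; checked LeaseException handling is omitted):

    LeaseManager<String> manager = new LeaseManager<>(5000);
    manager.start();

    Lease<String> lease = manager.acquire("container-42");
    lease.registerCallBack(() -> {
      // Runs on the callback executor thread if the lease is not released
      // before the timeout; trigger recovery for the resource here.
      return null;
    });

    // Work finished before the timeout: release explicitly, no callback runs.
    manager.release("container-42");
    manager.shutdown();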
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java
new file mode 100644
index 0000000..ced31de
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseManagerNotRunningException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+/**
+ * This exception indicates that the LeaseManager service is not running.
+ */
+public class LeaseManagerNotRunningException extends RuntimeException {
+
+  /**
+   * Constructs a {@code LeaseManagerNotRunningException} with {@code null}
+   * as its error detail message.
+   */
+  public LeaseManagerNotRunningException() {
+    super();
+  }
+
+  /**
+   * Constructs a {@code LeaseManagerNotRunningException} with the specified
+   * detail message.
+   *
+   * @param message
+   *        The detail message (which is saved for later retrieval
+   *        by the {@link #getMessage()} method)
+   */
+  public LeaseManagerNotRunningException(String message) {
+    super(message);
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java
new file mode 100644
index 0000000..c292d33
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/LeaseNotFoundException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+
+/**
+ * This exception represents that the lease that is being accessed does not
+ * exist.
+ */
+public class LeaseNotFoundException extends LeaseException {
+
+  /**
+   * Constructs a {@code LeaseNotFoundException} with {@code null}
+   * as its error detail message.
+   */
+  public LeaseNotFoundException() {
+    super();
+  }
+
+  /**
+   * Constructs a {@code LeaseNotFoundException} with the specified
+   * detail message.
+   *
+   * @param message
+   *        The detail message (which is saved for later retrieval
+   *        by the {@link #getMessage()} method)
+   */
+  public LeaseNotFoundException(String message) {
+    super(message);
+  }
+
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java
new file mode 100644
index 0000000..48ee2e1
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/lease/package-info.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * A generic lease management API which can be used if a service
+ * needs any kind of lease management.
+ */
+
+package org.apache.hadoop.ozone.lease;
+/*
+ This package contains lease management related classes.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/package-info.java
new file mode 100644
index 0000000..db399db
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/package-info.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+/**
+ This package contains classes that support the Ozone implementation on the
+ datanode side.
+
+ The main parts of Ozone on the datanode are:
+
+ 1. REST Interface - This code lives under the web directory and listens to
+ the WebHDFS port.
+
+ 2. Datanode container classes: These support persistence of Ozone objects on
+ the datanode. They live under the container directory.
+
+ 3. Client and Shell: We also provide an Ozone REST client library and shell;
+ they live under web/client and web/ozShell.
+
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java
new file mode 100644
index 0000000..fa79341
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/ScmBlockLocationProtocolServerSideTranslatorPB.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import com.google.common.collect.Sets;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteKeyBlocksResultProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .ScmLocatedBlockProto;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This class is the server-side translator that forwards requests received on
+ * {@link ScmBlockLocationProtocolPB} to the
+ * {@link ScmBlockLocationProtocol} server implementation.
+ */
+@InterfaceAudience.Private
+public final class ScmBlockLocationProtocolServerSideTranslatorPB
+    implements ScmBlockLocationProtocolPB {
+
+  private final ScmBlockLocationProtocol impl;
+
+  /**
+   * Creates a new ScmBlockLocationProtocolServerSideTranslatorPB.
+   *
+   * @param impl {@link ScmBlockLocationProtocol} server implementation
+   */
+  public ScmBlockLocationProtocolServerSideTranslatorPB(
+      ScmBlockLocationProtocol impl) throws IOException {
+    this.impl = impl;
+  }
+
+
+  @Override
+  public GetScmBlockLocationsResponseProto getScmBlockLocations(
+      RpcController controller, GetScmBlockLocationsRequestProto req)
+      throws ServiceException {
+    Set<String> keys = Sets.newLinkedHashSetWithExpectedSize(
+        req.getKeysCount());
+    for (String key : req.getKeysList()) {
+      keys.add(key);
+    }
+    final Set<AllocatedBlock> blocks;
+    try {
+      blocks = impl.getBlockLocations(keys);
+    } catch (IOException ex) {
+      throw new ServiceException(ex);
+    }
+    GetScmBlockLocationsResponseProto.Builder resp =
+        GetScmBlockLocationsResponseProto.newBuilder();
+    for (AllocatedBlock block: blocks) {
+      ScmLocatedBlockProto.Builder locatedBlock =
+          ScmLocatedBlockProto.newBuilder()
+              .setKey(block.getKey())
+              .setPipeline(block.getPipeline().getProtobufMessage());
+      resp.addLocatedBlocks(locatedBlock.build());
+    }
+    return resp.build();
+  }
+
+  @Override
+  public AllocateScmBlockResponseProto allocateScmBlock(
+      RpcController controller, AllocateScmBlockRequestProto request)
+      throws ServiceException {
+    try {
+      AllocatedBlock allocatedBlock =
+          impl.allocateBlock(request.getSize(), request.getType(),
+              request.getFactor(), request.getOwner());
+      if (allocatedBlock != null) {
+        return
+            AllocateScmBlockResponseProto.newBuilder()
+                .setKey(allocatedBlock.getKey())
+                .setPipeline(allocatedBlock.getPipeline().getProtobufMessage())
+                .setCreateContainer(allocatedBlock.getCreateContainer())
+                .setErrorCode(AllocateScmBlockResponseProto.Error.success)
+                .build();
+      } else {
+        return AllocateScmBlockResponseProto.newBuilder()
+            .setErrorCode(AllocateScmBlockResponseProto.Error.unknownFailure)
+            .build();
+      }
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public DeleteScmKeyBlocksResponseProto deleteScmKeyBlocks(
+      RpcController controller, DeleteScmKeyBlocksRequestProto req)
+      throws ServiceException {
+    DeleteScmKeyBlocksResponseProto.Builder resp =
+        DeleteScmKeyBlocksResponseProto.newBuilder();
+    try {
+      List<BlockGroup> infoList = req.getKeyBlocksList().stream()
+          .map(BlockGroup::getFromProto).collect(Collectors.toList());
+      final List<DeleteBlockGroupResult> results =
+          impl.deleteKeyBlocks(infoList);
+      for (DeleteBlockGroupResult result: results) {
+        DeleteKeyBlocksResultProto.Builder deleteResult =
+            DeleteKeyBlocksResultProto
+            .newBuilder()
+            .setObjectKey(result.getObjectKey())
+            .addAllBlockResults(result.getBlockResultProtoList());
+        resp.addResults(deleteResult.build());
+      }
+    } catch (IOException ex) {
+      throw new ServiceException(ex);
+    }
+    return resp.build();
+  }
+
+  @Override
+  public HddsProtos.GetScmInfoRespsonseProto getScmInfo(
+      RpcController controller, HddsProtos.GetScmInfoRequestProto req)
+      throws ServiceException {
+    ScmInfo scmInfo;
+    try {
+      scmInfo = impl.getScmInfo();
+    } catch (IOException ex) {
+      throw new ServiceException(ex);
+    }
+    return HddsProtos.GetScmInfoRespsonseProto.newBuilder()
+        .setClusterId(scmInfo.getClusterId())
+        .setScmId(scmInfo.getScmId())
+        .build();
+  }
+}
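
The translator above simply unwraps the protobuf request, delegates to the ScmBlockLocationProtocol implementation, and wraps any IOException in a ServiceException. The following is a hedged, test-style sketch of that contract; the "scmImpl" instance and the key value are hypothetical, and the builder/getter names follow the usual protobuf codegen convention for the repeated "keys" and "locatedBlocks" fields used in the code above.

// Hedged sketch (inside a method declaring throws Exception); "scmImpl" is a
// hypothetical ScmBlockLocationProtocol implementation.
ScmBlockLocationProtocolServerSideTranslatorPB translator =
    new ScmBlockLocationProtocolServerSideTranslatorPB(scmImpl);

GetScmBlockLocationsRequestProto request =
    GetScmBlockLocationsRequestProto.newBuilder()
        .addKeys("volume/bucket/key-1")   // repeated "keys" field
        .build();

GetScmBlockLocationsResponseProto response =
    translator.getScmBlockLocations(null /* RpcController unused here */, request);

for (ScmLocatedBlockProto located : response.getLocatedBlocksList()) {
  System.out.println(located.getKey() + " -> " + located.getPipeline());
}
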
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java
new file mode 100644
index 0000000..4974268
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerLocationProtocolServerSideTranslatorPB.java
@@ -0,0 +1,212 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMDeleteContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerResponseProto;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * This class is the server-side translator that forwards requests received on
+ * {@link StorageContainerLocationProtocolPB} to the
+ * {@link StorageContainerLocationProtocol} server implementation.
+ */
+@InterfaceAudience.Private
+public final class StorageContainerLocationProtocolServerSideTranslatorPB
+    implements StorageContainerLocationProtocolPB {
+
+  private final StorageContainerLocationProtocol impl;
+
+  /**
+   * Creates a new StorageContainerLocationProtocolServerSideTranslatorPB.
+   *
+   * @param impl {@link StorageContainerLocationProtocol} server implementation
+   */
+  public StorageContainerLocationProtocolServerSideTranslatorPB(
+      StorageContainerLocationProtocol impl) throws IOException {
+    this.impl = impl;
+  }
+
+  @Override
+  public ContainerResponseProto allocateContainer(RpcController unused,
+      ContainerRequestProto request) throws ServiceException {
+    try {
+      Pipeline pipeline = impl.allocateContainer(request.getReplicationType(),
+          request.getReplicationFactor(), request.getContainerName(),
+          request.getOwner());
+      return ContainerResponseProto.newBuilder()
+          .setPipeline(pipeline.getProtobufMessage())
+          .setErrorCode(ContainerResponseProto.Error.success)
+          .build();
+
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetContainerResponseProto getContainer(
+      RpcController controller, GetContainerRequestProto request)
+      throws ServiceException {
+    try {
+      Pipeline pipeline = impl.getContainer(request.getContainerName());
+      return GetContainerResponseProto.newBuilder()
+          .setPipeline(pipeline.getProtobufMessage())
+          .build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public SCMListContainerResponseProto listContainer(RpcController controller,
+      SCMListContainerRequestProto request) throws ServiceException {
+    try {
+      String startName = null;
+      String prefixName = null;
+      int count = -1;
+
+      // Arguments check.
+      if (request.hasPrefixName()) {
+        // A container name prefix is given.
+        prefixName = request.getPrefixName();
+      }
+      if (request.hasStartName()) {
+        // A start container name is given.
+        startName = request.getStartName();
+      }
+
+      count = request.getCount();
+      List<ContainerInfo> containerList =
+          impl.listContainer(startName, prefixName, count);
+      SCMListContainerResponseProto.Builder builder =
+          SCMListContainerResponseProto.newBuilder();
+      for (ContainerInfo container : containerList) {
+        builder.addContainers(container.getProtobuf());
+      }
+      return builder.build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public SCMDeleteContainerResponseProto deleteContainer(
+      RpcController controller, SCMDeleteContainerRequestProto request)
+      throws ServiceException {
+    try {
+      impl.deleteContainer(request.getContainerName());
+      return SCMDeleteContainerResponseProto.newBuilder().build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public StorageContainerLocationProtocolProtos.NodeQueryResponseProto
+      queryNode(RpcController controller,
+      StorageContainerLocationProtocolProtos.NodeQueryRequestProto request)
+      throws ServiceException {
+    try {
+      EnumSet<HddsProtos.NodeState> nodeStateEnumSet = EnumSet.copyOf(request
+          .getQueryList());
+      HddsProtos.NodePool datanodes = impl.queryNode(nodeStateEnumSet,
+          request.getScope(), request.getPoolName());
+      return StorageContainerLocationProtocolProtos
+          .NodeQueryResponseProto.newBuilder()
+          .setDatanodes(datanodes)
+          .build();
+    } catch (Exception e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public ObjectStageChangeResponseProto notifyObjectStageChange(
+      RpcController controller, ObjectStageChangeRequestProto request)
+      throws ServiceException {
+    try {
+      impl.notifyObjectStageChange(request.getType(), request.getName(),
+          request.getOp(), request.getStage());
+      return ObjectStageChangeResponseProto.newBuilder().build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public PipelineResponseProto allocatePipeline(
+      RpcController controller, PipelineRequestProto request)
+      throws ServiceException {
+    // TODO : Wiring this up requires one more patch.
+    return null;
+  }
+
+  @Override
+  public HddsProtos.GetScmInfoRespsonseProto getScmInfo(
+      RpcController controller, HddsProtos.GetScmInfoRequestProto req)
+      throws ServiceException {
+    try {
+      ScmInfo scmInfo = impl.getScmInfo();
+      return HddsProtos.GetScmInfoRespsonseProto.newBuilder()
+          .setClusterId(scmInfo.getClusterId())
+          .setScmId(scmInfo.getScmId())
+          .build();
+    } catch (IOException ex) {
+      throw new ServiceException(ex);
+    }
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
new file mode 100644
index 0000000..860386d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocolPB;
+
+/**
+ * This package contains classes for the Protocol Buffers binding of Ozone
+ * protocols.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java
new file mode 100644
index 0000000..af56da3
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/JsonUtils.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.utils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.type.CollectionType;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * JSON Utility functions used in ozone.
+ */
+public final class JsonUtils {
+
+  // Reuse ObjectMapper instance for improving performance.
+  // ObjectMapper is thread safe as long as the instance is always configured
+  // before use.
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectReader READER = MAPPER.readerFor(Object.class);
+  private static final ObjectWriter WRITER =
+      MAPPER.writerWithDefaultPrettyPrinter();
+
+  private JsonUtils() {
+    // Never constructed
+  }
+
+  public static String toJsonStringWithDefaultPrettyPrinter(String jsonString)
+      throws IOException {
+    Object json = READER.readValue(jsonString);
+    return WRITER.writeValueAsString(json);
+  }
+
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
+
+  /**
+   * Deserializes a list of elements from a given JSON string;
+   * each element in the list is of the given type.
+   *
+   * @param str json string.
+   * @param elementType element type.
+   * @return List of elements of type elementType
+   * @throws IOException
+   */
+  public static List<?> toJsonList(String str, Class<?> elementType)
+      throws IOException {
+    CollectionType type = MAPPER.getTypeFactory()
+        .constructCollectionType(List.class, elementType);
+    return MAPPER.readValue(str, type);
+  }
+}
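
A brief usage sketch of the utility above; the map contents and the JSON array literal are made up for illustration, and the usual java.util imports are assumed.

// Hypothetical usage of JsonUtils (inside a method declaring throws IOException).
Map<String, Object> volumeInfo = new HashMap<>();
volumeInfo.put("volume", "vol-1");
volumeInfo.put("quotaInBytes", 1024L);

String compact = JsonUtils.toJsonString(volumeInfo);
String pretty = JsonUtils.toJsonStringWithDefaultPrettyPrinter(compact);

// Deserialize a JSON array into a list whose elements are Strings.
List<?> buckets = JsonUtils.toJsonList("[\"bucket-a\",\"bucket-b\"]", String.class);
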
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java
new file mode 100644
index 0000000..e5812c0
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.utils;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java
new file mode 100644
index 0000000..431da64
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.CompletionService;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * An abstract class for a background service in ozone.
+ * A background service schedules multiple child tasks to run in parallel
+ * at a fixed interval. In each interval, it waits until all the tasks
+ * finish execution and then schedules the next run.
+ */
+public abstract class BackgroundService {
+
+  @VisibleForTesting
+  public static final Logger LOG =
+      LoggerFactory.getLogger(BackgroundService.class);
+
+  // Executor to launch child tasks
+  private final ScheduledExecutorService exec;
+  private final ThreadGroup threadGroup;
+  private final ThreadFactory threadFactory;
+  private final String serviceName;
+  private final long interval;
+  private final long serviceTimeout;
+  private final TimeUnit unit;
+  private final PeriodicalTask service;
+
+  public BackgroundService(String serviceName, long interval,
+      TimeUnit unit, int threadPoolSize, long serviceTimeout) {
+    this.interval = interval;
+    this.unit = unit;
+    this.serviceName = serviceName;
+    this.serviceTimeout = serviceTimeout;
+    threadGroup = new ThreadGroup(serviceName);
+    ThreadFactory tf = r -> new Thread(threadGroup, r);
+    threadFactory = new ThreadFactoryBuilder()
+        .setThreadFactory(tf)
+        .setDaemon(true)
+        .setNameFormat(serviceName + "#%d")
+        .build();
+    exec = Executors.newScheduledThreadPool(threadPoolSize, threadFactory);
+    service = new PeriodicalTask();
+  }
+
+  protected ExecutorService getExecutorService() {
+    return this.exec;
+  }
+
+  @VisibleForTesting
+  public int getThreadCount() {
+    return threadGroup.activeCount();
+  }
+
+  @VisibleForTesting
+  public void triggerBackgroundTaskForTesting() {
+    service.run();
+  }
+
+  // start service
+  public void start() {
+    exec.scheduleWithFixedDelay(service, 0, interval, unit);
+  }
+
+  public abstract BackgroundTaskQueue getTasks();
+
+  /**
+   * Runs one or more background tasks concurrently and
+   * waits until all tasks return their results.
+   */
+  public class PeriodicalTask implements Runnable {
+    @Override
+    public synchronized void run() {
+      LOG.debug("Running background service : {}", serviceName);
+      BackgroundTaskQueue tasks = getTasks();
+      if (tasks.isEmpty()) {
+        // No tasks found, or there was a problem initializing the tasks;
+        // return and retry in the next interval.
+        return;
+      }
+
+      LOG.debug("Number of background tasks to execute : {}", tasks.size());
+      CompletionService<BackgroundTaskResult> taskCompletionService =
+          new ExecutorCompletionService<>(exec);
+
+      List<Future<BackgroundTaskResult>> results = Lists.newArrayList();
+      while (tasks.size() > 0) {
+        BackgroundTask task = tasks.poll();
+        Future<BackgroundTaskResult> result =
+            taskCompletionService.submit(task);
+        results.add(result);
+      }
+
+      results.parallelStream().forEach(taskResultFuture -> {
+        try {
+          // Collect task results
+          BackgroundTaskResult result = serviceTimeout > 0
+              ? taskResultFuture.get(serviceTimeout, TimeUnit.MILLISECONDS)
+              : taskResultFuture.get();
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("task execution result size {}", result.getSize());
+          }
+        } catch (InterruptedException | ExecutionException e) {
+          LOG.warn(
+              "Background task fails to execute, "
+                  + "retrying in next interval", e);
+        } catch (TimeoutException e) {
+          LOG.warn("Background task executes timed out, "
+              + "retrying in next interval", e);
+        }
+      });
+    }
+  }
+
+  // shutdown and make sure all threads are properly released.
+  public void shutdown() {
+    LOG.info("Shutting down service {}", this.serviceName);
+    exec.shutdown();
+    try {
+      if (!exec.awaitTermination(60, TimeUnit.SECONDS)) {
+        exec.shutdownNow();
+      }
+    } catch (InterruptedException e) {
+      exec.shutdownNow();
+    }
+    if (threadGroup.activeCount() == 0 && !threadGroup.isDestroyed()) {
+      threadGroup.destroy();
+    }
+  }
+}
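
A minimal sketch of a concrete service built on the abstract class above. The service name, interval, and the no-op task are hypothetical; a real service would do actual work (for example, scanning a MetadataStore) inside call().

// Hypothetical BackgroundService subclass: one low-priority task per interval.
public class ExampleCleanupService extends BackgroundService {

  public ExampleCleanupService() {
    // service name, interval, interval unit, thread pool size, task timeout (ms)
    super("ExampleCleanupService", 60000, TimeUnit.MILLISECONDS, 2, 300000);
  }

  @Override
  public BackgroundTaskQueue getTasks() {
    BackgroundTaskQueue queue = new BackgroundTaskQueue();
    queue.add(new BackgroundTask<BackgroundTaskResult>() {
      @Override
      public int getPriority() {
        return 0;
      }

      @Override
      public BackgroundTaskResult call() {
        // Real work would go here; report an empty result for the sketch.
        return BackgroundTaskResult.EmptyTaskResult.newResult();
      }
    });
    return queue;
  }
}

Calling new ExampleCleanupService().start() schedules the task queue with the fixed delay configured in the constructor; shutdown() stops the executor.
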
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java
new file mode 100644
index 0000000..47e8ebc
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import java.util.concurrent.Callable;
+
+/**
+ * A task to be run by a {@link BackgroundService}.
+ */
+public interface BackgroundTask<T> extends Callable<T> {
+
+  int getPriority();
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java
new file mode 100644
index 0000000..b56ef0c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import java.util.PriorityQueue;
+
+/**
+ * A priority queue that stores a number of {@link BackgroundTask}s.
+ */
+public class BackgroundTaskQueue {
+
+  private final PriorityQueue<BackgroundTask> tasks;
+
+  public BackgroundTaskQueue() {
+    tasks = new PriorityQueue<>((task1, task2)
+        -> Integer.compare(task1.getPriority(), task2.getPriority()));
+  }
+
+  /**
+   * @return the head task in this queue.
+   */
+  public synchronized BackgroundTask poll() {
+    return tasks.poll();
+  }
+
+  /**
+   * Adds a {@link BackgroundTask} to the queue;
+   * the task is ordered by its priority.
+   *
+   * @param task the task to add.
+   */
+  public synchronized void add(BackgroundTask task) {
+    tasks.add(task);
+  }
+
+  /**
+   * @return true if the queue contains no task, false otherwise.
+   */
+  public synchronized boolean isEmpty() {
+    return tasks.isEmpty();
+  }
+
+  /**
+   * @return the size of the queue.
+   */
+  public synchronized int size() {
+    return tasks.size();
+  }
+}
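
A small sketch of the priority ordering described above: tasks are polled in ascending priority value, lowest first. The helper below is hypothetical and exists only for the example.

// Hypothetical helper creating a no-op task with a fixed priority.
static BackgroundTask<BackgroundTaskResult> taskWithPriority(final int priority) {
  return new BackgroundTask<BackgroundTaskResult>() {
    @Override
    public int getPriority() {
      return priority;
    }

    @Override
    public BackgroundTaskResult call() {
      return BackgroundTaskResult.EmptyTaskResult.newResult();
    }
  };
}

// Tasks come back in ascending priority order regardless of insertion order.
BackgroundTaskQueue queue = new BackgroundTaskQueue();
queue.add(taskWithPriority(2));
queue.add(taskWithPriority(0));
queue.add(taskWithPriority(1));
while (!queue.isEmpty()) {
  System.out.println(queue.poll().getPriority());   // prints 0, 1, 2
}
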
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java
new file mode 100644
index 0000000..198300f
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.utils;
+
+/**
+ * Result of a {@link BackgroundTask}.
+ */
+public interface BackgroundTaskResult {
+
+  /**
+   * Returns the size of entries included in this result.
+   */
+  int getSize();
+
+  /**
+   * An empty task result implementation.
+   */
+  class EmptyTaskResult implements BackgroundTaskResult {
+
+    public static EmptyTaskResult newResult() {
+      return new EmptyTaskResult();
+    }
+
+    @Override
+    public int getSize() {
+      return 0;
+    }
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java
new file mode 100644
index 0000000..47699eb
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import com.google.common.collect.Lists;
+
+import java.util.List;
+
+/**
+ * A utility class to store a batch of DB write operations.
+ */
+public class BatchOperation {
+
+  /**
+   * Enum for write operations.
+   */
+  public enum Operation {
+    DELETE, PUT
+  }
+
+  private List<SingleOperation> operations =
+      Lists.newArrayList();
+
+  /**
+   * Add a PUT operation into the batch.
+   */
+  public void put(byte[] key, byte[] value) {
+    operations.add(new SingleOperation(Operation.PUT, key, value));
+  }
+
+  /**
+   * Add a DELETE operation into the batch.
+   */
+  public void delete(byte[] key) {
+    operations.add(new SingleOperation(Operation.DELETE, key, null));
+
+  }
+
+  public List<SingleOperation> getOperations() {
+    return operations;
+  }
+
+  /**
+   * A SingleOperation represents a PUT or DELETE operation
+   * and the data the operation manipulates.
+   */
+  public static class SingleOperation {
+
+    private Operation opt;
+    private byte[] key;
+    private byte[] value;
+
+    public SingleOperation(Operation opt, byte[] key, byte[] value) {
+      this.opt = opt;
+      if (key == null) {
+        throw new IllegalArgumentException("key cannot be null");
+      }
+      this.key = key.clone();
+      this.value = value == null ? null : value.clone();
+    }
+
+    public Operation getOpt() {
+      return opt;
+    }
+
+    public byte[] getKey() {
+      return key.clone();
+    }
+
+    public byte[] getValue() {
+      return value == null ? null : value.clone();
+    }
+  }
+}
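
A short sketch of how a batch is typically assembled and then handed to a MetadataStore (defined later in this patch) for an atomic write; the store instance and the keys are assumptions for illustration, and UTF_8 is a static import of java.nio.charset.StandardCharsets.UTF_8.

// Hypothetical usage: stage two puts and a delete, then commit atomically.
BatchOperation batch = new BatchOperation();
batch.put("key-1".getBytes(UTF_8), "value-1".getBytes(UTF_8));
batch.put("key-2".getBytes(UTF_8), "value-2".getBytes(UTF_8));
batch.delete("stale-key".getBytes(UTF_8));

// "store" is an existing MetadataStore; all three operations apply together.
store.writeBatch(batch);
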
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java
new file mode 100644
index 0000000..c407398
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import java.io.IOException;
+
+/**
+ * A consumer for metadata store key-value entries.
+ * Used by the {@link MetadataStore} class.
+ */
+@FunctionalInterface
+public interface EntryConsumer {
+
+  /**
+   * Consumes a key and value and produces a boolean result.
+   * @param key key
+   * @param value value
+   * @return a boolean value produced by the consumer
+   * @throws IOException
+   */
+  boolean consume(byte[] key, byte[] value) throws IOException;
+}
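
EntryConsumer is a functional interface, so it is usually supplied as a lambda to MetadataStore#iterate (declared later in this patch). A hedged sketch, assuming an existing store instance and an import of java.util.concurrent.atomic.AtomicInteger:

// Count at most the first 1000 entries; returning false stops the iteration.
AtomicInteger visited = new AtomicInteger();
store.iterate(null, (key, value) -> {
  visited.incrementAndGet();
  return visited.get() < 1000;
});
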
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java
new file mode 100644
index 0000000..83ca83d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java
@@ -0,0 +1,380 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.fusesource.leveldbjni.JniDBFactory;
+import org.iq80.leveldb.DB;
+import org.iq80.leveldb.DBIterator;
+import org.iq80.leveldb.Options;
+import org.iq80.leveldb.ReadOptions;
+import org.iq80.leveldb.Snapshot;
+import org.iq80.leveldb.WriteBatch;
+import org.iq80.leveldb.WriteOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * LevelDB interface.
+ */
+public class LevelDBStore implements MetadataStore {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LevelDBStore.class);
+
+  private DB db;
+  private final File dbFile;
+  private final Options dbOptions;
+  private final WriteOptions writeOptions;
+
+  public LevelDBStore(File dbPath, boolean createIfMissing)
+      throws IOException {
+    dbOptions = new Options();
+    dbOptions.createIfMissing(createIfMissing);
+    this.dbFile = dbPath;
+    this.writeOptions = new WriteOptions().sync(true);
+    openDB(dbPath, dbOptions);
+  }
+
+  /**
+   * Opens a DB file with the given options.
+   *
+   * @param dbPath  - DB file path
+   * @param options - LevelDB options
+   * @throws IOException
+   */
+  public LevelDBStore(File dbPath, Options options)
+      throws IOException {
+    dbOptions = options;
+    this.dbFile = dbPath;
+    this.writeOptions = new WriteOptions().sync(true);
+    openDB(dbPath, dbOptions);
+  }
+
+  private void openDB(File dbPath, Options options) throws IOException {
+    dbPath.getParentFile().mkdirs();
+    db = JniDBFactory.factory.open(dbPath, options);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("LevelDB successfully opened");
+      LOG.debug("[Option] cacheSize = " + options.cacheSize());
+      LOG.debug("[Option] createIfMissing = " + options.createIfMissing());
+      LOG.debug("[Option] blockSize = " + options.blockSize());
+      LOG.debug("[Option] compressionType= " + options.compressionType());
+      LOG.debug("[Option] maxOpenFiles= " + options.maxOpenFiles());
+      LOG.debug("[Option] writeBufferSize= "+ options.writeBufferSize());
+    }
+  }
+
+  /**
+   * Puts a Key into file.
+   *
+   * @param key   - key
+   * @param value - value
+   */
+  @Override
+  public void put(byte[] key, byte[] value) {
+    db.put(key, value, writeOptions);
+  }
+
+  /**
+   * Get Key.
+   *
+   * @param key key
+   * @return value
+   */
+  @Override
+  public byte[] get(byte[] key) {
+    return db.get(key);
+  }
+
+  /**
+   * Delete Key.
+   *
+   * @param key - Key
+   */
+  @Override
+  public void delete(byte[] key) {
+    db.delete(key);
+  }
+
+  /**
+   * Closes the DB.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void close() throws IOException {
+    if (db != null) {
+      db.close();
+    }
+  }
+
+  /**
+   * Returns true if the DB is empty.
+   *
+   * @return boolean
+   * @throws IOException
+   */
+  @Override
+  public boolean isEmpty() throws IOException {
+    try (DBIterator iter = db.iterator()) {
+      iter.seekToFirst();
+      return !iter.hasNext();
+    }
+  }
+
+  /**
+   * Returns the actual levelDB object.
+   * @return DB handle.
+   */
+  public DB getDB() {
+    return db;
+  }
+
+  /**
+   * Returns an iterator on all the key-value pairs in the DB.
+   * @return an iterator on DB entries.
+   */
+  public DBIterator getIterator() {
+    return db.iterator();
+  }
+
+  @Override
+  public void destroy() throws IOException {
+    close();
+    JniDBFactory.factory.destroy(dbFile, dbOptions);
+  }
+
+  @Override
+  public ImmutablePair<byte[], byte[]> peekAround(int offset,
+      byte[] from) throws IOException, IllegalArgumentException {
+    try (DBIterator it = db.iterator()) {
+      if (from == null) {
+        it.seekToFirst();
+      } else {
+        it.seek(from);
+      }
+      if (!it.hasNext()) {
+        return null;
+      }
+      switch (offset) {
+      case 0:
+        Entry<byte[], byte[]> current = it.next();
+        return new ImmutablePair<>(current.getKey(), current.getValue());
+      case 1:
+        if (it.next() != null && it.hasNext()) {
+          Entry<byte[], byte[]> next = it.peekNext();
+          return new ImmutablePair<>(next.getKey(), next.getValue());
+        }
+        break;
+      case -1:
+        if (it.hasPrev()) {
+          Entry<byte[], byte[]> prev = it.peekPrev();
+          return new ImmutablePair<>(prev.getKey(), prev.getValue());
+        }
+        break;
+      default:
+        throw new IllegalArgumentException(
+            "Position can only be -1, 0 " + "or 1, but found " + offset);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public void iterate(byte[] from, EntryConsumer consumer)
+      throws IOException {
+    try (DBIterator iter = db.iterator()) {
+      if (from != null) {
+        iter.seek(from);
+      } else {
+        iter.seekToFirst();
+      }
+      while (iter.hasNext()) {
+        Entry<byte[], byte[]> current = iter.next();
+        if (!consumer.consume(current.getKey(),
+            current.getValue())) {
+          break;
+        }
+      }
+    }
+  }
+
+  /**
+   * Compacts the DB by removing deleted keys etc.
+   * @throws IOException if there is an error.
+   */
+  @Override
+  public void compactDB() throws IOException {
+    if (db != null) {
+      // From LevelDB docs : begin == null and end == null means the whole DB.
+      db.compactRange(null, null);
+    }
+  }
+
+  @Override
+  public void writeBatch(BatchOperation operation) throws IOException {
+    List<BatchOperation.SingleOperation> operations =
+        operation.getOperations();
+    if (!operations.isEmpty()) {
+      try (WriteBatch writeBatch = db.createWriteBatch()) {
+        for (BatchOperation.SingleOperation opt : operations) {
+          switch (opt.getOpt()) {
+          case DELETE:
+            writeBatch.delete(opt.getKey());
+            break;
+          case PUT:
+            writeBatch.put(opt.getKey(), opt.getValue());
+            break;
+          default:
+            throw new IllegalArgumentException("Invalid operation "
+                + opt.getOpt());
+          }
+        }
+        db.write(writeBatch);
+      }
+    }
+  }
+
+  @Override
+  public List<Map.Entry<byte[], byte[]>> getRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilters.MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException {
+    return getRangeKVs(startKey, count, false, filters);
+  }
+
+  @Override
+  public List<Map.Entry<byte[], byte[]>> getSequentialRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilters.MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException {
+    return getRangeKVs(startKey, count, true, filters);
+  }
+
+  /**
+   * Returns a certain range of key value pairs as a list based on a
+   * startKey or count. Further a {@link MetadataKeyFilter} can be added to
+   * filter keys if necessary. To prevent race conditions while listing
+   * entries, this implementation takes a snapshot and lists the entries from
+   * the snapshot. This may, on the other hand, cause the range result to be
+   * slightly different from the actual data if the data is updated concurrently.
+   * <p>
+   * If the startKey is specified and found in levelDB, this key and the keys
+   * after this key will be included in the result. If the startKey is null
+   * all entries will be included as long as other conditions are satisfied.
+   * If the given startKey doesn't exist, an empty list will be returned.
+   * <p>
+   * The count argument limits the total number of entries to return;
+   * the value of count must be an integer greater than 0.
+   * <p>
+   * This method allows the caller to specify one or more
+   * {@link MetadataKeyFilter}s to filter keys by certain conditions.
+   * Once given, only the entries whose key passes all the filters will be
+   * included in the result.
+   *
+   * @param startKey a start key.
+   * @param count max number of entries to return.
+   * @param filters customized one or more {@link MetadataKeyFilter}.
+   * @return a list of entries found in the database or an empty list if the
+   * startKey is invalid.
+   * @throws IOException if there are I/O errors.
+   * @throws IllegalArgumentException if count is less than 0.
+   */
+  private List<Entry<byte[], byte[]>> getRangeKVs(byte[] startKey,
+      int count, boolean sequential, MetadataKeyFilter... filters)
+      throws IOException {
+    List<Entry<byte[], byte[]>> result = new ArrayList<>();
+    long start = System.currentTimeMillis();
+    if (count < 0) {
+      throw new IllegalArgumentException(
+          "Invalid count given " + count + ", count must be greater than 0");
+    }
+    Snapshot snapShot = null;
+    DBIterator dbIter = null;
+    try {
+      snapShot = db.getSnapshot();
+      ReadOptions readOptions = new ReadOptions().snapshot(snapShot);
+      dbIter = db.iterator(readOptions);
+      if (startKey == null) {
+        dbIter.seekToFirst();
+      } else {
+        if (db.get(startKey) == null) {
+          // Key not found, return empty list
+          return result;
+        }
+        dbIter.seek(startKey);
+      }
+      while (dbIter.hasNext() && result.size() < count) {
+        byte[] preKey = dbIter.hasPrev() ? dbIter.peekPrev().getKey() : null;
+        byte[] nextKey = dbIter.hasNext() ? dbIter.peekNext().getKey() : null;
+        Entry<byte[], byte[]> current = dbIter.next();
+
+        if (filters == null) {
+          result.add(current);
+        } else {
+          if (Arrays.stream(filters).allMatch(
+              entry -> entry.filterKey(preKey, current.getKey(), nextKey))) {
+            result.add(current);
+          } else {
+            if (result.size() > 0 && sequential) {
+              // if the caller asks for a sequential range of results,
+              // and we hit a mismatch, abort the iteration here.
+              // if result is empty, we continue to look for the first match.
+              break;
+            }
+          }
+        }
+      }
+    } finally {
+      if (snapShot != null) {
+        snapShot.close();
+      }
+      if (dbIter != null) {
+        dbIter.close();
+      }
+      if (LOG.isDebugEnabled()) {
+        if (filters != null) {
+          for (MetadataKeyFilters.MetadataKeyFilter filter : filters) {
+            int scanned = filter.getKeysScannedNum();
+            int hinted = filter.getKeysHintedNum();
+            if (scanned > 0 || hinted > 0) {
+              LOG.debug(
+                  "getRangeKVs ({}) numOfKeysScanned={}, numOfKeysHinted={}",
+                  filter.getClass().getSimpleName(), scanned, hinted);
+            }
+          }
+        }
+        long end = System.currentTimeMillis();
+        long timeConsumed = end - start;
+        LOG.debug("Time consumed for getRangeKVs() is {}ms,"
+            + " result length is {}.", timeConsumed, result.size());
+      }
+    }
+    return result;
+  }
+}
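
A hedged usage sketch of the store above: open a DB directory, write and read a key, and list a bounded range starting from the first key. Paths, keys, and values are illustrative; UTF_8 is java.nio.charset.StandardCharsets.UTF_8.

// Hypothetical usage (inside a method declaring throws IOException).
File dbDir = new File("/tmp/example-container.db");
try (LevelDBStore store = new LevelDBStore(dbDir, true /* createIfMissing */)) {
  store.put("block-1".getBytes(UTF_8), "pipeline-a".getBytes(UTF_8));
  byte[] value = store.get("block-1".getBytes(UTF_8));

  // startKey == null lists from the first key; at most 100 entries, no filters.
  List<Map.Entry<byte[], byte[]>> range = store.getRangeKVs(null, 100);
}
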
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java
new file mode 100644
index 0000000..3ff0a94
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.utils;
+
+import com.google.common.base.Strings;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.OzoneConsts;
+
+/**
+ * A utility class to filter levelDB keys.
+ */
+public final class MetadataKeyFilters {
+
+  private static KeyPrefixFilter deletingKeyFilter =
+      new MetadataKeyFilters.KeyPrefixFilter(OzoneConsts.DELETING_KEY_PREFIX);
+
+  private static KeyPrefixFilter normalKeyFilter =
+      new MetadataKeyFilters.KeyPrefixFilter(OzoneConsts.DELETING_KEY_PREFIX,
+          true);
+
+  private MetadataKeyFilters() {
+  }
+
+  public static KeyPrefixFilter getDeletingKeyFilter() {
+    return deletingKeyFilter;
+  }
+
+  public static KeyPrefixFilter getNormalKeyFilter() {
+    return normalKeyFilter;
+  }
+  /**
+   * Interface for levelDB key filters.
+   */
+  public interface MetadataKeyFilter {
+    /**
+     * Filter levelDB key with a certain condition.
+     *
+     * @param preKey     previous key.
+     * @param currentKey current key.
+     * @param nextKey    next key.
+     * @return true if a certain condition satisfied, return false otherwise.
+     */
+    boolean filterKey(byte[] preKey, byte[] currentKey, byte[] nextKey);
+
+    default int getKeysScannedNum() {
+      return 0;
+    }
+
+    default int getKeysHintedNum() {
+      return 0;
+    }
+  }
+
+  /**
+   * Utility class to filter keys by a string prefix. This filter
+   * assumes keys can be parsed as strings.
+   */
+  public static class KeyPrefixFilter implements MetadataKeyFilter {
+
+    private String keyPrefix = null;
+    private int keysScanned = 0;
+    private int keysHinted = 0;
+    private boolean negative;
+
+    public KeyPrefixFilter(String keyPrefix) {
+      this(keyPrefix, false);
+    }
+
+    public KeyPrefixFilter(String keyPrefix, boolean negative) {
+      this.keyPrefix = keyPrefix;
+      this.negative = negative;
+    }
+
+    @Override
+    public boolean filterKey(byte[] preKey, byte[] currentKey,
+        byte[] nextKey) {
+      keysScanned++;
+      boolean accept = false;
+      if (Strings.isNullOrEmpty(keyPrefix)) {
+        accept = true;
+      } else {
+        if (currentKey != null &&
+            DFSUtil.bytes2String(currentKey).startsWith(keyPrefix)) {
+          keysHinted++;
+          accept = true;
+        } else {
+          accept = false;
+        }
+      }
+      return (negative) ? !accept : accept;
+    }
+
+    @Override
+    public int getKeysScannedNum() {
+      return keysScanned;
+    }
+
+    @Override
+    public int getKeysHintedNum() {
+      return keysHinted;
+    }
+  }
+}
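
A short sketch pairing the filters above with MetadataStore#getRangeKVs (declared later in this patch). The store instance and the bucket prefix are hypothetical.

// List up to 100 entries whose keys carry the deleting prefix.
List<Map.Entry<byte[], byte[]>> deleting =
    store.getRangeKVs(null, 100, MetadataKeyFilters.getDeletingKeyFilter());

// Or filter by a custom, made-up key prefix.
MetadataKeyFilters.KeyPrefixFilter bucketFilter =
    new MetadataKeyFilters.KeyPrefixFilter("/vol-1/bucket-1/");
List<Map.Entry<byte[], byte[]>> bucketKeys =
    store.getRangeKVs(null, 100, bucketFilter);
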
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java
new file mode 100644
index 0000000..b90b08f
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Interface for key-value store that stores ozone metadata.
+ * Ozone metadata is stored as key-value pairs; both key and value
+ * are arbitrary byte arrays.
+ */
+@InterfaceStability.Evolving
+public interface MetadataStore extends Closeable {
+
+  /**
+   * Puts a key-value pair into the store.
+   *
+   * @param key metadata key
+   * @param value metadata value
+   */
+  void put(byte[] key, byte[] value) throws IOException;
+
+  /**
+   * @return true if the metadata store is empty.
+   *
+   * @throws IOException
+   */
+  boolean isEmpty() throws IOException;
+
+  /**
+   * Returns the value mapped to the given key in byte array.
+   *
+   * @param key metadata key
+   * @return value in byte array
+   * @throws IOException
+   */
+  byte[] get(byte[] key) throws IOException;
+
+  /**
+   * Deletes a key from the metadata store.
+   *
+   * @param key metadata key
+   * @throws IOException
+   */
+  void delete(byte[] key) throws IOException;
+
+  /**
+   * Returns a certain range of key value pairs as a list based on a
+   * startKey or count. Further a {@link MetadataKeyFilter} can be added to
+   * filter keys if necessary. To prevent race conditions while listing
+   * entries, this implementation takes a snapshot and lists the entries from
+   * the snapshot. This may, on the other hand, cause the range result to be
+   * slightly different from the actual data if the data is updated concurrently.
+   * <p>
+   * If the startKey is specified and found in levelDB, this key and the keys
+   * after this key will be included in the result. If the startKey is null
+   * all entries will be included as long as other conditions are satisfied.
+   * If the given startKey doesn't exist, an empty list will be returned.
+   * <p>
+   * The count argument limits the total number of entries to return;
+   * the value of count must be an integer greater than 0.
+   * <p>
+   * This method allows the caller to specify one or more
+   * {@link MetadataKeyFilter}s to filter keys by certain conditions.
+   * Once given, only the entries whose key passes all the filters will be
+   * included in the result.
+   *
+   * @param startKey a start key.
+   * @param count max number of entries to return.
+   * @param filters customized one or more {@link MetadataKeyFilter}.
+   * @return a list of entries found in the database or an empty list if the
+   * startKey is invalid.
+   * @throws IOException if there are I/O errors.
+   * @throws IllegalArgumentException if count is less than 0.
+   */
+  List<Map.Entry<byte[], byte[]>> getRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException;
+
+  /**
+   * This method is very similar to {@link #getRangeKVs}; the only
+   * difference is that this method returns a sequential range
+   * of elements based on the filters. While iterating the elements,
+   * if it meets any entry that does not pass the filter, the iteration stops
+   * at that point without looking for the next match. If no filter is given,
+   * this method behaves just like {@link #getRangeKVs}.
+   *
+   * @param startKey a start key.
+   * @param count max number of entries to return.
+   * @param filters customized one or more {@link MetadataKeyFilter}.
+   * @return a list of entries found in the database.
+   * @throws IOException
+   * @throws IllegalArgumentException
+   */
+  List<Map.Entry<byte[], byte[]>> getSequentialRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException;
+
+  /**
+   * A batch of PUT, DELETE operations handled as a single atomic write.
+   *
+   * @throws IOException write fails
+   */
+  void writeBatch(BatchOperation operation) throws IOException;
+
+  /**
+   * Compact the entire database.
+   * @throws IOException
+   */
+  void compactDB() throws IOException;
+
+  /**
+   * Destroys the content of the specified database;
+   * a destroyed database cannot be loaded again.
+   * Be very careful with this method.
+   *
+   * @throws IOException if I/O error happens
+   */
+  void destroy() throws IOException;
+
+  /**
+   * Seeks the database to a certain key and returns the key-value
+   * pair around this key based on the given offset. Note that this method
+   * only supports offsets -1 (left), 0 (current) and 1 (right);
+   * any other offset will cause an {@link IllegalArgumentException}.
+   *
+   * @param offset offset to the key
+   * @param from from which key
+   * @return a key-value pair
+   * @throws IOException
+   */
+  ImmutablePair<byte[], byte[]> peekAround(int offset, byte[] from)
+      throws IOException, IllegalArgumentException;
+
+  /**
+   * Iterates entries in the database from a certain key.
+   * Applies the given {@link EntryConsumer} to the key and value of
+   * each entry; the function produces a boolean result which is used
+   * as the criterion to exit from the iteration.
+   *
+   * @param from the start key
+   * @param consumer
+   *   an {@link EntryConsumer} applied to each key and value. If the consumer
+   *   returns true, the iteration continues to the next entry; otherwise it
+   *   exits.
+   * @throws IOException
+   */
+  void iterate(byte[] from, EntryConsumer consumer)
+      throws IOException;
+}
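
A brief sketch of peekAround, which the Javadoc above restricts to offsets -1, 0, and 1; the store instance and the anchor key are assumptions, and UTF_8 is java.nio.charset.StandardCharsets.UTF_8.

// Inspect the neighbourhood of a key (inside a method declaring throws IOException).
byte[] anchor = "block-5".getBytes(UTF_8);
ImmutablePair<byte[], byte[]> current = store.peekAround(0, anchor);    // the key itself
ImmutablePair<byte[], byte[]> next = store.peekAround(1, anchor);       // entry to its right
ImmutablePair<byte[], byte[]> previous = store.peekAround(-1, anchor);  // entry to its left
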
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java
new file mode 100644
index 0000000..9e9c32a
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.iq80.leveldb.Options;
+import org.rocksdb.BlockBasedTableConfig;
+import org.rocksdb.Statistics;
+import org.rocksdb.StatsLevel;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_METADATA_STORE_IMPL_LEVELDB;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_METADATA_STORE_IMPL_ROCKSDB;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_METADATA_STORE_ROCKSDB_STATISTICS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF;
+
+/**
+ * Builder for metadata store.
+ */
+public class MetadataStoreBuilder {
+
+  private File dbFile;
+  private long cacheSize;
+  private boolean createIfMissing = true;
+  private Configuration conf;
+
+  public static MetadataStoreBuilder newBuilder() {
+    return new MetadataStoreBuilder();
+  }
+
+  public MetadataStoreBuilder setDbFile(File dbPath) {
+    this.dbFile = dbPath;
+    return this;
+  }
+
+  public MetadataStoreBuilder setCacheSize(long cache) {
+    this.cacheSize = cache;
+    return this;
+  }
+
+  public MetadataStoreBuilder setCreateIfMissing(boolean doCreate) {
+    this.createIfMissing = doCreate;
+    return this;
+  }
+
+  public MetadataStoreBuilder setConf(Configuration configuration) {
+    this.conf = configuration;
+    return this;
+  }
+
+  public MetadataStore build() throws IOException {
+    if (dbFile == null) {
+      throw new IllegalArgumentException("Failed to build metadata store, "
+          + "dbFile is required but not found");
+    }
+
+    // Build db store based on configuration
+    MetadataStore store = null;
+    String impl = conf == null ?
+        OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_DEFAULT :
+        conf.getTrimmed(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL,
+            OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_DEFAULT);
+    if (OZONE_METADATA_STORE_IMPL_LEVELDB.equals(impl)) {
+      Options options = new Options();
+      options.createIfMissing(createIfMissing);
+      if (cacheSize > 0) {
+        options.cacheSize(cacheSize);
+      }
+      store = new LevelDBStore(dbFile, options);
+    } else if (OZONE_METADATA_STORE_IMPL_ROCKSDB.equals(impl)) {
+      org.rocksdb.Options opts = new org.rocksdb.Options();
+      opts.setCreateIfMissing(createIfMissing);
+
+      if (cacheSize > 0) {
+        BlockBasedTableConfig tableConfig = new BlockBasedTableConfig();
+        tableConfig.setBlockCacheSize(cacheSize);
+        opts.setTableFormatConfig(tableConfig);
+      }
+
+      String rocksDbStat = conf == null ?
+          OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT :
+          conf.getTrimmed(OZONE_METADATA_STORE_ROCKSDB_STATISTICS,
+              OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT);
+
+      if (!rocksDbStat.equals(OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF)) {
+        Statistics statistics = new Statistics();
+        statistics.setStatsLevel(StatsLevel.valueOf(rocksDbStat));
+        opts = opts.setStatistics(statistics);
+
+      }
+      store = new RocksDBStore(dbFile, opts);
+    } else {
+      throw new IllegalArgumentException("Invalid argument for "
+          + OzoneConfigKeys.OZONE_METADATA_STORE_IMPL
+          + ". Expecting " + OZONE_METADATA_STORE_IMPL_LEVELDB
+          + " or " + OZONE_METADATA_STORE_IMPL_ROCKSDB
+          + ", but met " + impl);
+    }
+    return store;
+  }
+}
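
A minimal sketch (not part of the patch) of how a caller might obtain a MetadataStore through the builder above; the db path, cache size, and the choice of the RocksDB implementation are illustrative assumptions.

```java
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.utils.MetadataStore;
import org.apache.hadoop.utils.MetadataStoreBuilder;

public class MetadataStoreBuilderExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Select the RocksDB implementation; LevelDB is the other supported value.
    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL,
        OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB);

    MetadataStore store = MetadataStoreBuilder.newBuilder()
        .setConf(conf)
        .setDbFile(new File("/tmp/example-container.db")) // hypothetical path
        .setCacheSize(16 * 1024 * 1024)                   // 16 MB block cache
        .setCreateIfMissing(true)
        .build();
    try {
      store.put("key1".getBytes(StandardCharsets.UTF_8),
          "value1".getBytes(StandardCharsets.UTF_8));
    } finally {
      store.close();
    }
  }
}
```
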
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java
new file mode 100644
index 0000000..a60e98d
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java
@@ -0,0 +1,382 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.ratis.shaded.com.google.common.annotations.VisibleForTesting;
+import org.rocksdb.DbPath;
+import org.rocksdb.Options;
+import org.rocksdb.RocksDB;
+import org.rocksdb.RocksDBException;
+import org.rocksdb.RocksIterator;
+import org.rocksdb.WriteBatch;
+import org.rocksdb.WriteOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.File;
+import java.io.IOException;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * RocksDB implementation of ozone metadata store.
+ */
+public class RocksDBStore implements MetadataStore {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RocksDBStore.class);
+
+  private RocksDB db = null;
+  private File dbLocation;
+  private WriteOptions writeOptions;
+  private Options dbOptions;
+  private ObjectName statMBeanName;
+
+  public RocksDBStore(File dbFile, Options options)
+      throws IOException {
+    Preconditions.checkNotNull(dbFile, "DB file location cannot be null");
+    RocksDB.loadLibrary();
+    dbOptions = options;
+    dbLocation = dbFile;
+    writeOptions = new WriteOptions();
+    try {
+
+      db = RocksDB.open(dbOptions, dbLocation.getAbsolutePath());
+      if (dbOptions.statistics() != null) {
+
+        Map<String, String> jmxProperties = new HashMap<String, String>();
+        jmxProperties.put("dbName", dbFile.getName());
+        statMBeanName = MBeans.register("Ozone", "RocksDbStore", jmxProperties,
+            new RocksDBStoreMBean(dbOptions.statistics()));
+        if (statMBeanName == null) {
+          LOG.warn("jmx registration failed during RocksDB init, db path :{}",
+              dbFile.getAbsolutePath());
+        }
+      }
+    } catch (RocksDBException e) {
+      throw new IOException(
+          "Failed init RocksDB, db path : " + dbFile.getAbsolutePath(), e);
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("RocksDB successfully opened.");
+      LOG.debug("[Option] dbLocation= {}", dbLocation.getAbsolutePath());
+      LOG.debug("[Option] createIfMissing = {}", options.createIfMissing());
+      LOG.debug("[Option] compactionPriority= {}", options.compactionStyle());
+      LOG.debug("[Option] compressionType= {}", options.compressionType());
+      LOG.debug("[Option] maxOpenFiles= {}", options.maxOpenFiles());
+      LOG.debug("[Option] writeBufferSize= {}", options.writeBufferSize());
+    }
+  }
+
+  private IOException toIOException(String msg, RocksDBException e) {
+    String statusCode = e.getStatus() == null ? "N/A" :
+        e.getStatus().getCodeString();
+    String errMessage = e.getMessage() == null ? "Unknown error" :
+        e.getMessage();
+    String output = msg + "; status : " + statusCode
+        + "; message : " + errMessage;
+    return new IOException(output, e);
+  }
+
+  @Override
+  public void put(byte[] key, byte[] value) throws IOException {
+    try {
+      db.put(writeOptions, key, value);
+    } catch (RocksDBException e) {
+      throw toIOException("Failed to put key-value to metadata store", e);
+    }
+  }
+
+  @Override
+  public boolean isEmpty() throws IOException {
+    RocksIterator it = null;
+    try {
+      it = db.newIterator();
+      it.seekToFirst();
+      return !it.isValid();
+    } finally {
+      if (it != null) {
+        it.close();
+      }
+    }
+  }
+
+  @Override
+  public byte[] get(byte[] key) throws IOException {
+    try {
+      return db.get(key);
+    } catch (RocksDBException e) {
+      throw toIOException("Failed to get the value for the given key", e);
+    }
+  }
+
+  @Override
+  public void delete(byte[] key) throws IOException {
+    try {
+      db.delete(key);
+    } catch (RocksDBException e) {
+      throw toIOException("Failed to delete the given key", e);
+    }
+  }
+
+  @Override
+  public List<Map.Entry<byte[], byte[]>> getRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilters.MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException {
+    return getRangeKVs(startKey, count, false, filters);
+  }
+
+  @Override
+  public List<Map.Entry<byte[], byte[]>> getSequentialRangeKVs(byte[] startKey,
+      int count, MetadataKeyFilters.MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException {
+    return getRangeKVs(startKey, count, true, filters);
+  }
+
+  private List<Map.Entry<byte[], byte[]>> getRangeKVs(byte[] startKey,
+      int count, boolean sequential,
+      MetadataKeyFilters.MetadataKeyFilter... filters)
+      throws IOException, IllegalArgumentException {
+    List<Map.Entry<byte[], byte[]>> result = new ArrayList<>();
+    long start = System.currentTimeMillis();
+    if (count < 0) {
+      throw new IllegalArgumentException(
+          "Invalid count given " + count + ", count must be greater than 0");
+    }
+    RocksIterator it = null;
+    try {
+      it = db.newIterator();
+      if (startKey == null) {
+        it.seekToFirst();
+      } else {
+        if(get(startKey) == null) {
+          // Key not found, return empty list
+          return result;
+        }
+        it.seek(startKey);
+      }
+      while(it.isValid() && result.size() < count) {
+        byte[] currentKey = it.key();
+        byte[] currentValue = it.value();
+
+        it.prev();
+        final byte[] prevKey = it.isValid() ? it.key() : null;
+
+        it.seek(currentKey);
+        it.next();
+        final byte[] nextKey = it.isValid() ? it.key() : null;
+
+        if (filters == null) {
+          result.add(new AbstractMap.SimpleImmutableEntry<>(currentKey,
+              currentValue));
+        } else {
+          if (Arrays.asList(filters).stream()
+              .allMatch(entry -> entry.filterKey(prevKey,
+                  currentKey, nextKey))) {
+            result.add(new AbstractMap.SimpleImmutableEntry<>(currentKey,
+                currentValue));
+          } else {
+            if (result.size() > 0 && sequential) {
+              // if the caller asks for a sequential range of results,
+              // and we hit a mismatch, stop iterating here.
+              // if the result is still empty, keep looking for the first match.
+              break;
+            }
+          }
+        }
+      }
+    } finally {
+      if (it != null) {
+        it.close();
+      }
+      long end = System.currentTimeMillis();
+      long timeConsumed = end - start;
+      if (LOG.isDebugEnabled()) {
+        if (filters != null) {
+          for (MetadataKeyFilters.MetadataKeyFilter filter : filters) {
+            int scanned = filter.getKeysScannedNum();
+            int hinted = filter.getKeysHintedNum();
+            if (scanned > 0 || hinted > 0) {
+              LOG.debug(
+                  "getRangeKVs ({}) numOfKeysScanned={}, numOfKeysHinted={}",
+                  filter.getClass().getSimpleName(), filter.getKeysScannedNum(),
+                  filter.getKeysHintedNum());
+            }
+          }
+        }
+        LOG.debug("Time consumed for getRangeKVs() is {}ms,"
+            + " result length is {}.", timeConsumed, result.size());
+      }
+    }
+    return result;
+  }
+
+  @Override
+  public void writeBatch(BatchOperation operation)
+      throws IOException {
+    List<BatchOperation.SingleOperation> operations =
+        operation.getOperations();
+    if (!operations.isEmpty()) {
+      try (WriteBatch writeBatch = new WriteBatch()) {
+        for (BatchOperation.SingleOperation opt : operations) {
+          switch (opt.getOpt()) {
+          case DELETE:
+            writeBatch.remove(opt.getKey());
+            break;
+          case PUT:
+            writeBatch.put(opt.getKey(), opt.getValue());
+            break;
+          default:
+            throw new IllegalArgumentException("Invalid operation "
+                + opt.getOpt());
+          }
+        }
+        db.write(writeOptions, writeBatch);
+      } catch (RocksDBException e) {
+        throw toIOException("Batch write operation failed", e);
+      }
+    }
+  }
+
+  @Override
+  public void compactDB() throws IOException {
+    if (db != null) {
+      try {
+        db.compactRange();
+      } catch (RocksDBException e) {
+        throw toIOException("Failed to compact db", e);
+      }
+    }
+  }
+
+  private void deleteQuietly(File fileOrDir) {
+    if (fileOrDir != null && fileOrDir.exists()) {
+      try {
+        FileUtils.forceDelete(fileOrDir);
+      } catch (IOException e) {
+        LOG.warn("Failed to delete dir {}", fileOrDir.getAbsolutePath(), e);
+      }
+    }
+  }
+
+  @Override
+  public void destroy() throws IOException {
+    // Make sure db is closed.
+    close();
+
+    // There is no destroyDB Java API available;
+    // equivalently, we delete all of the db directories.
+    deleteQuietly(dbLocation);
+    deleteQuietly(new File(dbOptions.dbLogDir()));
+    deleteQuietly(new File(dbOptions.walDir()));
+    List<DbPath> dbPaths = dbOptions.dbPaths();
+    if (dbPaths != null) {
+      dbPaths.forEach(dbPath -> {
+        deleteQuietly(new File(dbPath.toString()));
+      });
+    }
+  }
+
+  @Override
+  public ImmutablePair<byte[], byte[]> peekAround(int offset,
+      byte[] from) throws IOException, IllegalArgumentException {
+    RocksIterator it = null;
+    try {
+      it = db.newIterator();
+      if (from == null) {
+        it.seekToFirst();
+      } else {
+        it.seek(from);
+      }
+      if (!it.isValid()) {
+        return null;
+      }
+
+      switch (offset) {
+      case 0:
+        break;
+      case 1:
+        it.next();
+        break;
+      case -1:
+        it.prev();
+        break;
+      default:
+        throw new IllegalArgumentException(
+            "Position can only be -1, 0 " + "or 1, but found " + offset);
+      }
+      return it.isValid() ? new ImmutablePair<>(it.key(), it.value()) : null;
+    } finally {
+      if (it != null) {
+        it.close();
+      }
+    }
+  }
+
+  @Override
+  public void iterate(byte[] from, EntryConsumer consumer)
+      throws IOException {
+    RocksIterator it = null;
+    try {
+      it = db.newIterator();
+      if (from != null) {
+        it.seek(from);
+      } else {
+        it.seekToFirst();
+      }
+      while (it.isValid()) {
+        if (!consumer.consume(it.key(), it.value())) {
+          break;
+        }
+        it.next();
+      }
+    } finally {
+      if (it != null) {
+        it.close();
+      }
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (statMBeanName != null) {
+      MBeans.unregister(statMBeanName);
+    }
+    if (db != null) {
+      db.close();
+    }
+
+  }
+
+  @VisibleForTesting
+  protected ObjectName getStatMBeanName() {
+    return statMBeanName;
+  }
+
+}
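
For context, a hedged sketch of the read-side MetadataStore API implemented above; the db path and keys are placeholders, and the store is created with the MetadataStoreBuilder from this patch (default implementation, since no Configuration is set).

```java
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.utils.MetadataStore;
import org.apache.hadoop.utils.MetadataStoreBuilder;

public class MetadataStoreReadExample {
  public static void main(String[] args) throws IOException {
    MetadataStore store = MetadataStoreBuilder.newBuilder()
        .setDbFile(new File("/tmp/example-container.db")) // hypothetical path
        .build();
    try {
      // Point lookup; returns null if the key is absent.
      byte[] value = store.get("key1".getBytes(StandardCharsets.UTF_8));
      System.out.println("key1 present: " + (value != null));

      // Range scan: up to 100 entries starting from the first key
      // (a null startKey means "seek to first"); no key filters applied.
      List<Map.Entry<byte[], byte[]>> range = store.getRangeKVs(null, 100);
      System.out.println("scanned " + range.size() + " entries");

      // Full iteration; returning false from the consumer stops early.
      store.iterate(null, (key, val) -> {
        System.out.println(new String(key, StandardCharsets.UTF_8));
        return true;
      });
    } finally {
      store.close();
    }
  }
}
```
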
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java
new file mode 100644
index 0000000..88c093e
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.utils;
+
+import org.rocksdb.HistogramData;
+import org.rocksdb.HistogramType;
+import org.rocksdb.Statistics;
+import org.rocksdb.TickerType;
+
+import javax.management.Attribute;
+import javax.management.AttributeList;
+import javax.management.AttributeNotFoundException;
+import javax.management.DynamicMBean;
+import javax.management.InvalidAttributeValueException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.ReflectionException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Adapter JMX bean to publish all the Rocksdb metrics.
+ */
+public class RocksDBStoreMBean implements DynamicMBean {
+
+  private Statistics statistics;
+
+  private Set<String> histogramAttributes = new HashSet<>();
+
+  public RocksDBStoreMBean(Statistics statistics) {
+    this.statistics = statistics;
+    histogramAttributes.add("Average");
+    histogramAttributes.add("Median");
+    histogramAttributes.add("Percentile95");
+    histogramAttributes.add("Percentile99");
+    histogramAttributes.add("StandardDeviation");
+  }
+
+  @Override
+  public Object getAttribute(String attribute)
+      throws AttributeNotFoundException, MBeanException, ReflectionException {
+    for (String histogramAttribute : histogramAttributes) {
+      if (attribute.endsWith("_" + histogramAttribute.toUpperCase())) {
+        String keyName = attribute
+            .substring(0, attribute.length() - histogramAttribute.length() - 1);
+        try {
+          HistogramData histogram =
+              statistics.getHistogramData(HistogramType.valueOf(keyName));
+          try {
+            Method method =
+                HistogramData.class.getMethod("get" + histogramAttribute);
+            return method.invoke(histogram);
+          } catch (Exception e) {
+            throw new ReflectionException(e,
+                "Can't read attribute " + attribute);
+          }
+        } catch (IllegalArgumentException exception) {
+          throw new AttributeNotFoundException(
+              "No such attribute in RocksDB stats: " + attribute);
+        }
+      }
+    }
+    try {
+      return statistics.getTickerCount(TickerType.valueOf(attribute));
+    } catch (IllegalArgumentException ex) {
+      throw new AttributeNotFoundException(
+          "No such attribute in RocksDB stats: " + attribute);
+    }
+  }
+
+  @Override
+  public void setAttribute(Attribute attribute)
+      throws AttributeNotFoundException, InvalidAttributeValueException,
+      MBeanException, ReflectionException {
+
+  }
+
+  @Override
+  public AttributeList getAttributes(String[] attributes) {
+    AttributeList result = new AttributeList();
+    for (String attributeName : attributes) {
+      try {
+        Object value = getAttribute(attributeName);
+        result.add(new Attribute(attributeName, value));
+      } catch (Exception e) {
+        // TODO: report the failure; for now, skip attributes that cannot be read.
+      }
+    }
+    return result;
+  }
+
+  @Override
+  public AttributeList setAttributes(AttributeList attributes) {
+    return null;
+  }
+
+  @Override
+  public Object invoke(String actionName, Object[] params, String[] signature)
+      throws MBeanException, ReflectionException {
+    return null;
+  }
+
+  @Override
+  public MBeanInfo getMBeanInfo() {
+
+    List<MBeanAttributeInfo> attributes = new ArrayList<>();
+    for (TickerType tickerType : TickerType.values()) {
+      attributes.add(new MBeanAttributeInfo(tickerType.name(), "long",
+          "RocksDBStat: " + tickerType.name(), true, false, false));
+    }
+    for (HistogramType histogramType : HistogramType.values()) {
+      for (String histogramAttribute : histogramAttributes) {
+        attributes.add(new MBeanAttributeInfo(
+            histogramType.name() + "_" + histogramAttribute.toUpperCase(),
+            "long", "RocksDBStat: " + histogramType.name(), true, false,
+            false));
+      }
+    }
+
+    return new MBeanInfo("", "RocksDBStat",
+        attributes.toArray(new MBeanAttributeInfo[0]), null, null, null);
+
+  }
+}
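
An illustrative-only sketch of the JMX attribute naming used by the dynamic MBean above: plain ticker names for counters and "<HISTOGRAM>_<SUFFIX>" (upper case) for histogram values. The BYTES_WRITTEN and DB_GET constants are assumed to exist in the bundled RocksDB version.

```java
import javax.management.JMException;

import org.apache.hadoop.utils.RocksDBStoreMBean;
import org.rocksdb.RocksDB;
import org.rocksdb.Statistics;

public class RocksDBStoreMBeanExample {
  public static void main(String[] args) throws JMException {
    RocksDB.loadLibrary();
    Statistics statistics = new Statistics();
    RocksDBStoreMBean mBean = new RocksDBStoreMBean(statistics);

    // Ticker counter attribute (0 for a Statistics not attached to a DB).
    Object bytesWritten = mBean.getAttribute("BYTES_WRITTEN");

    // Histogram attribute: histogram type name + "_" + upper-cased suffix.
    Object dbGetAverage = mBean.getAttribute("DB_GET_AVERAGE");

    System.out.println(bytesWritten + " / " + dbGetAverage);
  }
}
```
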
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java
new file mode 100644
index 0000000..4466337
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.utils;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java
new file mode 100644
index 0000000..3a55831
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/ratis/RatisHelper.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.ratis;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.ratis.client.RaftClient;
+import org.apache.ratis.conf.RaftProperties;
+import org.apache.ratis.grpc.GrpcConfigKeys;
+import org.apache.ratis.protocol.RaftGroup;
+import org.apache.ratis.protocol.RaftGroupId;
+import org.apache.ratis.protocol.RaftPeer;
+import org.apache.ratis.protocol.RaftPeerId;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
+import org.apache.ratis.util.SizeInBytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Ratis helper methods.
+ */
+public interface RatisHelper {
+  Logger LOG = LoggerFactory.getLogger(RatisHelper.class);
+
+  static String toRaftPeerIdString(DatanodeDetails id) {
+    return id.getUuidString() + "_" + id.getRatisPort();
+  }
+
+  static String toRaftPeerAddressString(DatanodeDetails id) {
+    return id.getIpAddress() + ":" + id.getRatisPort();
+  }
+
+  static RaftPeerId toRaftPeerId(DatanodeDetails id) {
+    return RaftPeerId.valueOf(toRaftPeerIdString(id));
+  }
+
+  static RaftPeer toRaftPeer(DatanodeDetails id) {
+    return new RaftPeer(toRaftPeerId(id), toRaftPeerAddressString(id));
+  }
+
+  static List<RaftPeer> toRaftPeers(Pipeline pipeline) {
+    return toRaftPeers(pipeline.getMachines());
+  }
+
+  static <E extends DatanodeDetails> List<RaftPeer> toRaftPeers(
+      List<E> datanodes) {
+    return datanodes.stream().map(RatisHelper::toRaftPeer)
+        .collect(Collectors.toList());
+  }
+
+  /* TODO: use a dummy id for all groups for the moment.
+   *       It should be changed to a unique id for each group.
+   */
+  RaftGroupId DUMMY_GROUP_ID =
+      RaftGroupId.valueOf(ByteString.copyFromUtf8("AOzoneRatisGroup"));
+
+  RaftGroup EMPTY_GROUP = new RaftGroup(DUMMY_GROUP_ID,
+      Collections.emptyList());
+
+  static RaftGroup emptyRaftGroup() {
+    return EMPTY_GROUP;
+  }
+
+  static RaftGroup newRaftGroup(List<DatanodeDetails> datanodes) {
+    final List<RaftPeer> newPeers = datanodes.stream()
+        .map(RatisHelper::toRaftPeer)
+        .collect(Collectors.toList());
+    return RatisHelper.newRaftGroup(newPeers);
+  }
+
+  static RaftGroup newRaftGroup(Collection<RaftPeer> peers) {
+    return peers.isEmpty()? emptyRaftGroup()
+        : new RaftGroup(DUMMY_GROUP_ID, peers);
+  }
+
+  static RaftGroup newRaftGroup(Pipeline pipeline) {
+    return newRaftGroup(toRaftPeers(pipeline));
+  }
+
+  static RaftClient newRaftClient(RpcType rpcType, Pipeline pipeline) {
+    return newRaftClient(rpcType, toRaftPeerId(pipeline.getLeader()),
+        newRaftGroup(pipeline));
+  }
+
+  static RaftClient newRaftClient(RpcType rpcType, RaftPeer leader) {
+    return newRaftClient(rpcType, leader.getId(),
+        newRaftGroup(new ArrayList<>(Arrays.asList(leader))));
+  }
+
+  static RaftClient newRaftClient(
+      RpcType rpcType, RaftPeerId leader, RaftGroup group) {
+    LOG.trace("newRaftClient: {}, leader={}, group={}", rpcType, leader, group);
+    final RaftProperties properties = new RaftProperties();
+    RaftConfigKeys.Rpc.setType(properties, rpcType);
+    GrpcConfigKeys.setMessageSizeMax(properties,
+        SizeInBytes.valueOf(OzoneConfigKeys.DFS_CONTAINER_CHUNK_MAX_SIZE));
+
+    return RaftClient.newBuilder()
+        .setRaftGroup(group)
+        .setLeaderId(leader)
+        .setProperties(properties)
+        .build();
+  }
+}
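
A small sketch, assuming a Pipeline obtained from SCM and gRPC as the configured Ratis RPC type, of how the helper above is typically used to build a RaftClient for the pipeline's leader datanode.

```java
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.ratis.RatisHelper;
import org.apache.ratis.client.RaftClient;
import org.apache.ratis.rpc.SupportedRpcType;

public class RatisHelperExample {
  static RaftClient clientFor(Pipeline pipeline) {
    // Peer ids/addresses are derived from the DatanodeDetails of the pipeline
    // members via toRaftPeerId/toRaftPeerAddressString above.
    return RatisHelper.newRaftClient(SupportedRpcType.GRPC, pipeline);
  }
}
```
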
diff --git a/hadoop-hdds/common/src/main/java/org/apache/ratis/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/package-info.java
new file mode 100644
index 0000000..c13c20c
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/ratis/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ratis;
+
+/**
+ * This package contains classes related to Apache Ratis.
+ */
diff --git a/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java
new file mode 100644
index 0000000..29242ad
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/ShadedProtoUtil.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.ratis.shaded.com.google.protobuf;
+
+/** Utilities for the shaded protobuf in Ratis. */
+public interface ShadedProtoUtil {
+  /**
+   * @param bytes the bytes to wrap
+   * @return the wrapped shaded {@link ByteString} (no copying).
+   */
+  static ByteString asShadedByteString(byte[] bytes) {
+    return ByteString.wrap(bytes);
+  }
+
+  /**
+   * @param shaded the shaded ByteString to copy
+   * @return a {@link com.google.protobuf.ByteString} (requires copying).
+   */
+  static com.google.protobuf.ByteString asByteString(ByteString shaded) {
+    return com.google.protobuf.ByteString.copyFrom(
+        shaded.asReadOnlyByteBuffer());
+  }
+}
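
A minimal round-trip sketch for the shaded/unshaded ByteString conversions above; nothing beyond the two helper methods in this file is assumed.

```java
import java.nio.charset.StandardCharsets;

import org.apache.ratis.shaded.com.google.protobuf.ByteString;
import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil;

public class ShadedProtoUtilExample {
  public static void main(String[] args) {
    byte[] data = "chunk-data".getBytes(StandardCharsets.UTF_8);

    // Wraps the array in a shaded ByteString without copying.
    ByteString shaded = ShadedProtoUtil.asShadedByteString(data);

    // Copies the bytes into an unshaded com.google.protobuf.ByteString.
    com.google.protobuf.ByteString unshaded =
        ShadedProtoUtil.asByteString(shaded);

    System.out.println(unshaded.toStringUtf8());
  }
}
```
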
diff --git a/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java
new file mode 100644
index 0000000..032dd96
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/ratis/shaded/com/google/protobuf/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ratis.shaded.com.google.protobuf;
+
+/**
+ * This package contains classes related to the shaded protobuf in Apache Ratis.
+ */
diff --git a/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
new file mode 100644
index 0000000..a6270ef
--- /dev/null
+++ b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
@@ -0,0 +1,415 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and Unstable.
+ * Please see http://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/InterfaceClassification.html
+ * for what changes are allowed for a *Unstable* .proto interface.
+ */
+
+// This file contains protocol buffers that are used to transfer data
+// to and from the datanode.
+option java_package = "org.apache.hadoop.hdds.protocol.proto";
+option java_outer_classname = "ContainerProtos";
+option java_generate_equals_and_hash = true;
+package hadoop.hdds;
+import "hdfs.proto";
+import "hdds.proto";
+
+/**
+ * Commands that are used to manipulate the state of containers on a datanode.
+ *
+ * These commands allow us to work against the datanode - from the
+ * StorageContainerManager as well as from clients.
+ *
+ *  1. CreateContainer - This call is usually made by Storage Container
+ *     manager, when we need to create a new container on a given datanode.
+ *
+ *  2. ReadContainer - Allows end user to stat a container. For example
+ *     this allows us to return the metadata of a container.
+ *
+ *  3. UpdateContainer - Updates container metadata.
+ *
+ *  4. DeleteContainer - This call is made to delete a container.
+ *
+ *  5. ListContainer - Returns the list of containers on this
+ *     datanode. This will be used by tests and tools.
+ *
+ *  6. PutKey - Given a valid container, creates a key.
+ *
+ *  7. GetKey - Allows user to read the metadata of a Key.
+ *
+ *  8. DeleteKey - Deletes a given key.
+ *
+ *  9. ListKey - Returns a list of keys that are present inside
+ *      a given container.
+ *
+ *  10. ReadChunk - Allows us to read a chunk.
+ *
+ *  11. DeleteChunk - Delete an unused chunk.
+ *
+ *  12. WriteChunk - Allows us to write a chunk.
+ *
+ *  13. ListChunk - Given a Container/Key returns the list of Chunks.
+ *
+ *  14. CompactChunk - Re-writes a chunk based on Offsets.
+ *
+ *  15. PutSmallFile - A single RPC that combines both putKey and WriteChunk.
+ *
+ *  16. GetSmallFile - A single RPC that combines both getKey and ReadChunk.
+ *
+ *  17. CloseContainer - Closes an open container and makes it immutable.
+ *
+ *  18. CopyContainer - Copies a container from a remote machine.
+ */
+
+enum Type {
+  CreateContainer = 1;
+  ReadContainer = 2;
+  UpdateContainer = 3;
+  DeleteContainer = 4;
+  ListContainer = 5;
+
+  PutKey = 6;
+  GetKey = 7;
+  DeleteKey = 8;
+  ListKey = 9;
+
+  ReadChunk = 10;
+  DeleteChunk = 11;
+  WriteChunk = 12;
+  ListChunk = 13;
+  CompactChunk = 14;
+
+  /** Combines Key and Chunk Operation into Single RPC. */
+  PutSmallFile = 15;
+  GetSmallFile = 16;
+  CloseContainer = 17;
+
+}
+
+
+enum Result {
+  SUCCESS = 1;
+  UNSUPPORTED_REQUEST = 2;
+  MALFORMED_REQUEST = 3;
+  CONTAINER_INTERNAL_ERROR = 4;
+  INVALID_CONFIG = 5;
+  INVALID_FILE_HASH_FOUND = 6;
+  CONTAINER_EXISTS = 7;
+  NO_SUCH_ALGORITHM = 8;
+  CONTAINER_NOT_FOUND = 9;
+  IO_EXCEPTION = 10;
+  UNABLE_TO_READ_METADATA_DB = 11;
+  NO_SUCH_KEY = 12;
+  OVERWRITE_FLAG_REQUIRED = 13;
+  UNABLE_TO_FIND_DATA_DIR = 14;
+  INVALID_WRITE_SIZE = 15;
+  CHECKSUM_MISMATCH = 16;
+  UNABLE_TO_FIND_CHUNK = 17;
+  PROTOC_DECODING_ERROR = 18;
+  INVALID_ARGUMENT = 19;
+  PUT_SMALL_FILE_ERROR = 20;
+  GET_SMALL_FILE_ERROR = 21;
+  CLOSED_CONTAINER_IO = 22;
+  ERROR_CONTAINER_NOT_EMPTY = 23;
+  ERROR_IN_COMPACT_DB = 24;
+  UNCLOSED_CONTAINER_IO = 25;
+  DELETE_ON_OPEN_CONTAINER = 26;
+  CLOSED_CONTAINER_RETRY = 27;
+}
+
+message ContainerCommandRequestProto {
+  required Type cmdType = 1; // Type of the command
+
+  // A string that identifies this command; we generate a Trace ID in the Ozone
+  // frontend, and this allows us to trace that command all over Ozone.
+  optional string traceID = 2;
+
+  // One of the following commands is available when the corresponding
+  // cmdType is set. At the protocol level we allow only
+  // one command in each packet.
+  // TODO : Upgrade to Protobuf 2.6 or later.
+  optional   CreateContainerRequestProto createContainer = 3;
+  optional   ReadContainerRequestProto readContainer = 4;
+  optional   UpdateContainerRequestProto updateContainer = 5;
+  optional   DeleteContainerRequestProto deleteContainer = 6;
+  optional   ListContainerRequestProto listContainer = 7;
+
+  optional   PutKeyRequestProto putKey = 8;
+  optional   GetKeyRequestProto getKey = 9;
+  optional   DeleteKeyRequestProto deleteKey = 10;
+  optional   ListKeyRequestProto listKey = 11;
+
+  optional   ReadChunkRequestProto readChunk = 12;
+  optional   WriteChunkRequestProto writeChunk = 13;
+  optional   DeleteChunkRequestProto deleteChunk = 14;
+  optional   ListChunkRequestProto listChunk = 15;
+
+  optional   PutSmallFileRequestProto putSmallFile = 16;
+  optional   GetSmallFileRequestProto getSmallFile = 17;
+  optional   CloseContainerRequestProto closeContainer = 18;
+  required   string datanodeUuid = 19;
+}
+
+message ContainerCommandResponseProto {
+  required Type cmdType = 1;
+  optional string traceID = 2;
+
+  optional   CreateContainerResponseProto createContainer = 3;
+  optional   ReadContainerResponseProto readContainer = 4;
+  optional   UpdateContainerResponseProto updateContainer = 5;
+  optional   DeleteContainerResponseProto deleteContainer = 6;
+  optional   ListContainerResponseProto listContainer = 7;
+
+  optional   PutKeyResponseProto putKey = 8;
+  optional   GetKeyResponseProto getKey = 9;
+  optional   DeleteKeyResponseProto deleteKey = 10;
+  optional   ListKeyResponseProto listKey = 11;
+
+  optional  WriteChunkResponseProto writeChunk = 12;
+  optional  ReadChunkResponseProto readChunk = 13;
+  optional  DeleteChunkResponseProto deleteChunk = 14;
+  optional  ListChunkResponseProto listChunk = 15;
+
+  required Result result = 17;
+  optional string message = 18;
+
+  optional PutSmallFileResponseProto putSmallFile = 19;
+  optional GetSmallFileResponseProto getSmallFile = 20;
+  optional CloseContainerResponseProto closeContainer = 21;
+
+}
+
+message ContainerData {
+  required string name = 1;
+  repeated KeyValue metadata = 2;
+  optional string dbPath = 3;
+  optional string containerPath = 4;
+  optional string hash = 6;
+  optional int64 bytesUsed = 7;
+  optional int64 size = 8;
+  optional int64 keyCount = 9;
+  // TODO: change to required after we switch container ID from string to long
+  optional int64 containerID = 10;
+  optional LifeCycleState state = 11 [default = OPEN];
+}
+
+message ContainerMeta {
+  required string fileName = 1;
+  required string hash = 2;
+}
+
+// Container Messages.
+message  CreateContainerRequestProto {
+  required Pipeline pipeline = 1;
+  required ContainerData containerData = 2;
+}
+
+message  CreateContainerResponseProto {
+}
+
+message  ReadContainerRequestProto {
+  required Pipeline pipeline = 1;
+  required string name = 2;
+}
+
+message  ReadContainerResponseProto {
+  optional ContainerData containerData = 2;
+}
+
+message  UpdateContainerRequestProto {
+  required Pipeline pipeline = 1;
+  required ContainerData containerData = 2;
+  optional bool forceUpdate = 3 [default = false];
+}
+
+message  UpdateContainerResponseProto {
+}
+
+message  DeleteContainerRequestProto {
+  required Pipeline pipeline = 1;
+  required string name = 2;
+  optional bool forceDelete = 3 [default = false];
+}
+
+message  DeleteContainerResponseProto {
+}
+
+message  ListContainerRequestProto {
+  required Pipeline pipeline = 1;
+  optional string prefix = 2;
+  required uint32 count = 3; // Max Results to return
+  optional string prevKey = 4;  // if this is not set query from start.
+}
+
+message  ListContainerResponseProto {
+  repeated ContainerData containerData = 1;
+}
+
+message CloseContainerRequestProto {
+  required Pipeline pipeline = 1;
+}
+
+message CloseContainerResponseProto {
+  optional Pipeline pipeline = 1;
+  optional string hash = 2;
+}
+
+message KeyData {
+  required string containerName = 1;
+  required string name = 2;
+  optional int64 flags = 3; // for future use.
+  repeated KeyValue metadata = 4;
+  repeated ChunkInfo chunks = 5;
+}
+
+// Key Messages.
+message  PutKeyRequestProto {
+  required Pipeline pipeline = 1;
+  required KeyData keyData = 2;
+}
+
+message  PutKeyResponseProto {
+}
+
+message  GetKeyRequestProto  {
+  required Pipeline pipeline = 1;
+  required KeyData keyData = 2;
+}
+
+message  GetKeyResponseProto  {
+  required KeyData keyData = 1;
+}
+
+
+message  DeleteKeyRequestProto {
+  required Pipeline pipeline = 1;
+  required string name = 2;
+}
+
+message   DeleteKeyResponseProto {
+}
+
+message  ListKeyRequestProto {
+  required Pipeline pipeline = 1;
+  optional string prefix = 2; // if specified returns keys that match prefix.
+  required string prevKey = 3;
+  required uint32 count = 4;
+
+}
+
+message  ListKeyResponseProto {
+  repeated KeyData keyData = 1;
+}
+
+// Chunk Operations
+
+message ChunkInfo {
+  required string chunkName = 1;
+  required uint64 offset = 2;
+  required uint64 len = 3;
+  optional string checksum = 4;
+  repeated KeyValue metadata = 5;
+}
+
+enum Stage {
+    WRITE_DATA = 1;
+    COMMIT_DATA = 2;
+    COMBINED = 3;
+}
+
+message  WriteChunkRequestProto  {
+  required Pipeline pipeline = 1;
+  required string keyName = 2;
+  required ChunkInfo chunkData = 3;
+  optional bytes data = 4;
+  optional Stage stage = 5 [default = COMBINED];
+}
+
+message  WriteChunkResponseProto {
+}
+
+message  ReadChunkRequestProto  {
+  required Pipeline pipeline = 1;
+  required string keyName = 2;
+  required ChunkInfo chunkData = 3;
+}
+
+message  ReadChunkResponseProto {
+  required Pipeline pipeline = 1;
+  required ChunkInfo chunkData = 2;
+  required bytes data = 3;
+}
+
+message  DeleteChunkRequestProto {
+  required Pipeline pipeline = 1;
+  required string keyName = 2;
+  required ChunkInfo chunkData = 3;
+}
+
+message  DeleteChunkResponseProto {
+}
+
+message  ListChunkRequestProto {
+  required Pipeline pipeline = 1;
+  required string keyName = 2;
+  required string prevChunkName = 3;
+  required uint32 count = 4;
+}
+
+message  ListChunkResponseProto {
+  repeated ChunkInfo chunkData = 1;
+}
+
+/** For small file access, combines WriteChunk and PutKey into a single
+RPC. */
+
+message PutSmallFileRequestProto {
+  required PutKeyRequestProto key = 1;
+  required ChunkInfo chunkInfo = 2;
+  required bytes data = 3;
+}
+
+
+message PutSmallFileResponseProto {
+
+}
+
+message GetSmallFileRequestProto {
+  required GetKeyRequestProto key = 1;
+}
+
+message GetSmallFileResponseProto {
+  required ReadChunkResponseProto data = 1;
+}
+
+message CopyContainerRequestProto {
+  required string containerName = 1;
+  required uint64 readOffset = 2;
+  optional uint64 len = 3;
+}
+
+message CopyContainerResponseProto {
+  required string archiveName = 1;
+  required uint64 readOffset = 2;
+  required uint64 len = 3;
+  required bool eof = 4;
+  repeated bytes data = 5;
+  optional int64 checksum = 6;
+}
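
A rough illustration of the Java API protoc generates from the messages above (outer class ContainerProtos): building a KeyData with one ChunkInfo. The container, key, and chunk names plus the length are placeholder values.

```java
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;

public class ContainerProtosExample {
  public static void main(String[] args) {
    ContainerProtos.ChunkInfo chunk = ContainerProtos.ChunkInfo.newBuilder()
        .setChunkName("key1_chunk_1")
        .setOffset(0)
        .setLen(4096)
        .build();

    ContainerProtos.KeyData keyData = ContainerProtos.KeyData.newBuilder()
        .setContainerName("container-1")
        .setName("key1")
        .addChunks(chunk)
        .build();

    System.out.println(keyData);
  }
}
```
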
diff --git a/hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto b/hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto
new file mode 100644
index 0000000..38d2e16
--- /dev/null
+++ b/hadoop-hdds/common/src/main/proto/ScmBlockLocationProtocol.proto
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and unstable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *unstable* .proto interface.
+ */
+
+option java_package = "org.apache.hadoop.hdds.protocol.proto";
+option java_outer_classname = "ScmBlockLocationProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+package hadoop.hdds;
+
+import "hdfs.proto";
+import "hdds.proto";
+
+
+// SCM Block protocol
+/**
+ * keys - batch of block keys to find
+ */
+message GetScmBlockLocationsRequestProto {
+  repeated string keys = 1;
+}
+
+/**
+ * locatedBlocks - for each requested hash, nodes that currently host the
+ *     container for that object key hash
+ */
+message GetScmBlockLocationsResponseProto {
+  repeated ScmLocatedBlockProto locatedBlocks = 1;
+}
+
+/**
+ * Holds the nodes that currently host the blocks for a key.
+ */
+message ScmLocatedBlockProto {
+  required string key = 1;
+  required hadoop.hdds.Pipeline pipeline = 2;
+}
+
+/**
+* Request sent to SCM asking it to allocate a block of the specified size.
+*/
+message AllocateScmBlockRequestProto {
+  required uint64 size = 1;
+  required ReplicationType type = 2;
+  required hadoop.hdds.ReplicationFactor factor = 3;
+  required string owner = 4;
+
+}
+
+/**
+ * A delete key request sent by KSM to SCM; it contains
+ * multiple keys (and their blocks).
+ */
+message DeleteScmKeyBlocksRequestProto {
+  repeated KeyBlocks keyBlocks = 1;
+}
+
+/**
+ * An object key and all its associated blocks.
+ * We need to encapsulate the object key name plus the blocks in this protocol
+ * because SCM needs to respond to KSM with the keys it has deleted.
+ * If the response only contained blocks, it would be very expensive for
+ * KSM to figure out which keys have been deleted.
+ */
+message KeyBlocks {
+  required string key = 1;
+  repeated string blocks = 2;
+}
+
+/**
+ * A delete key response from SCM to KSM; it contains multiple child results.
+ * Each child result represents a key deletion result; a key result is
+ * considered successful only if all blocks of the key are successfully deleted.
+ */
+message DeleteScmKeyBlocksResponseProto {
+  repeated DeleteKeyBlocksResultProto results = 1;
+}
+
+/**
+ * A key deletion result. It contains all the block deletion results.
+ */
+message DeleteKeyBlocksResultProto {
+  required string objectKey = 1;
+  repeated DeleteScmBlockResult blockResults = 2;
+}
+
+message DeleteScmBlockResult {
+  enum Result {
+    success = 1;
+    chillMode = 2;
+    errorNotFound = 3;
+    unknownFailure = 4;
+  }
+  required Result result = 1;
+  required string key = 2;
+}
+
+/**
+ * Reply from SCM with the result of a block allocation.
+ */
+message AllocateScmBlockResponseProto {
+  enum Error {
+    success = 1;
+    errorNotEnoughSpace = 2;
+    errorSizeTooBig = 3;
+    unknownFailure = 4;
+  }
+  required Error errorCode = 1;
+  required string key = 2;
+  required hadoop.hdds.Pipeline pipeline = 3;
+  required bool createContainer = 4;
+  optional string errorMessage = 5;
+}
+
+/**
+ * Protocol used from KeySpaceManager to StorageContainerManager.
+ * See request and response messages for details of the RPC calls.
+ */
+service ScmBlockLocationProtocolService {
+
+  /**
+   * Find the set of nodes that currently host the block, as
+   * identified by the key.  This method supports batch lookup by
+   * passing multiple keys.
+   */
+  rpc getScmBlockLocations(GetScmBlockLocationsRequestProto)
+      returns (GetScmBlockLocationsResponseProto);
+
+  /**
+   * Creates a block entry in SCM.
+   */
+  rpc allocateScmBlock(AllocateScmBlockRequestProto)
+      returns (AllocateScmBlockResponseProto);
+
+  /**
+   * Deletes blocks for a set of object keys from SCM.
+   */
+  rpc deleteScmKeyBlocks(DeleteScmKeyBlocksRequestProto)
+      returns (DeleteScmKeyBlocksResponseProto);
+
+  /**
+   * Gets the scmInfo from SCM.
+   */
+  rpc getScmInfo(hadoop.hdds.GetScmInfoRequestProto)
+      returns (hadoop.hdds.GetScmInfoRespsonseProto);
+}
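
A hedged sketch of the generated ScmBlockLocationProtocolProtos classes for the KSM-to-SCM deletion request above; the key and block ids are invented.

```java
import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos.KeyBlocks;

public class DeleteScmKeyBlocksExample {
  public static void main(String[] args) {
    KeyBlocks keyBlocks = KeyBlocks.newBuilder()
        .setKey("/volume/bucket/key1")
        .addBlocks("block-0001")
        .addBlocks("block-0002")
        .build();

    DeleteScmKeyBlocksRequestProto request =
        DeleteScmKeyBlocksRequestProto.newBuilder()
            .addKeyBlocks(keyBlocks)
            .build();

    System.out.println(request);
  }
}
```
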
diff --git a/hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto b/hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto
new file mode 100644
index 0000000..d7540a3
--- /dev/null
+++ b/hadoop-hdds/common/src/main/proto/StorageContainerLocationProtocol.proto
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and unstable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *unstable* .proto interface.
+ */
+
+option java_package = "org.apache.hadoop.hdds.protocol.proto";
+option java_outer_classname = "StorageContainerLocationProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+package hadoop.hdds;
+
+import "hdfs.proto";
+import "hdds.proto";
+
+/**
+* Request sent to SCM asking where the container should be created.
+*/
+message ContainerRequestProto {
+  required string containerName = 1;
+  // Ozone only supports a replication factor of either 1 or 3.
+  required ReplicationFactor replicationFactor = 2;
+  required ReplicationType  replicationType = 3;
+  required string owner = 4;
+
+}
+
+/**
+ * Reply from SCM with the result of the container request.
+ */
+message ContainerResponseProto {
+  enum Error {
+    success = 1;
+    errorContainerAlreadyExists = 2;
+    errorContainerMissing = 3;
+  }
+  required Error errorCode = 1;
+  required Pipeline pipeline = 2;
+  optional string errorMessage = 3;
+}
+
+message GetContainerRequestProto {
+  required string containerName = 1;
+}
+
+message GetContainerResponseProto {
+  required Pipeline pipeline = 1;
+}
+
+message SCMListContainerRequestProto {
+  required uint32 count = 1;
+  optional string startName = 2;
+  optional string prefixName = 3;
+}
+
+message SCMListContainerResponseProto {
+  repeated SCMContainerInfo containers = 1;
+}
+
+message SCMDeleteContainerRequestProto {
+  required string containerName = 1;
+}
+
+message SCMDeleteContainerResponseProto {
+  // Empty response
+}
+
+message ObjectStageChangeRequestProto {
+  enum Type {
+    container = 1;
+    pipeline = 2;
+  }
+  // delete/copy operation may be added later
+  enum Op {
+    create = 1;
+    close = 2;
+  }
+  enum Stage {
+    begin = 1;
+    complete = 2;
+  }
+  required string name = 1;
+  required Type type = 2;
+  required Op op= 3;
+  required Stage stage = 4;
+}
+
+message ObjectStageChangeResponseProto {
+  // Empty response
+}
+
+/*
+ NodeQueryRequest sends a request to SCM asking for a list of nodes that
+ match the NodeStates that we are requesting.
+*/
+message NodeQueryRequestProto {
+
+
+  // Repeated, so we can specify more than one status type.
+  // These NodeState types are additive for now, in the sense that
+  // if you specify HEALTHY and FREE_NODE members --
+  // then you get all healthy nodes which are not raft members.
+  //
+  // If you specify all healthy and dead nodes, you will get nothing
+  // back. The server is not going to dictate what combinations make sense;
+  // it is entirely up to the caller.
+  // TODO: Support operators like OR and NOT. Currently it is always an
+  // implied AND.
+
+  repeated NodeState query = 1;
+  required QueryScope scope = 2;
+  optional string poolName = 3; // if scope is pool, then pool name is needed.
+}
+
+message NodeQueryResponseProto {
+  required NodePool datanodes = 1;
+}
+
+/**
+  Request to create a replication pipeline.
+ */
+message PipelineRequestProto {
+  required ReplicationType replicationType = 1;
+  required ReplicationFactor replicationFactor = 2;
+
+  // if datanodes are specified then pipelines are created using those
+  // datanodes.
+  optional NodePool nodePool = 3;
+  optional string pipelineID = 4;
+}
+
+message  PipelineResponseProto {
+  enum Error {
+    success = 1;
+    errorPipelineAlreadyExists = 2;
+  }
+  required Error errorCode = 1;
+  optional Pipeline  pipeline = 2;
+  optional string errorMessage = 3;
+}
+
+/**
+ * Protocol used from an HDFS node to StorageContainerManager.  See the request
+ * and response messages for details of the RPC calls.
+ */
+service StorageContainerLocationProtocolService {
+
+  /**
+   * Creates a container entry in SCM.
+   */
+  rpc allocateContainer(ContainerRequestProto) returns (ContainerResponseProto);
+
+  /**
+   * Returns the pipeline for a given container.
+   */
+  rpc getContainer(GetContainerRequestProto) returns (GetContainerResponseProto);
+
+  rpc listContainer(SCMListContainerRequestProto) returns (SCMListContainerResponseProto);
+
+  /**
+   * Deletes a container in SCM.
+   */
+  rpc deleteContainer(SCMDeleteContainerRequestProto) returns (SCMDeleteContainerResponseProto);
+
+  /**
+  * Returns a set of Nodes that meet a criteria.
+  */
+  rpc queryNode(NodeQueryRequestProto)  returns (NodeQueryResponseProto);
+
+  /**
+  * Notification from the client when container or pipeline operations begin or finish on datanodes.
+  */
+  rpc notifyObjectStageChange(ObjectStageChangeRequestProto) returns (ObjectStageChangeResponseProto);
+
+  /*
+  *  APIs that manage Pipelines.
+  *
+  * Pipelines are abstractions offered by SCM and Datanode that allow users
+  * to create a replication pipeline.
+  *
+  *  The following APIs allow command line programs like the SCM CLI to list
+  * and manage pipelines.
+  */
+
+  /**
+  *  Creates a replication pipeline.
+  */
+  rpc allocatePipeline(PipelineRequestProto)
+      returns (PipelineResponseProto);
+
+  /**
+  *  Returns information about SCM.
+  */
+  rpc getScmInfo(GetScmInfoRequestProto)
+      returns (GetScmInfoRespsonseProto);
+}
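
For illustration, a sketch of assembling the container allocation request declared above; the container name and owner are placeholders, and the replication enums come from the generated HddsProtos (hdds.proto).

```java
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto;

public class ContainerRequestExample {
  public static void main(String[] args) {
    ContainerRequestProto request = ContainerRequestProto.newBuilder()
        .setContainerName("container-1")
        .setReplicationFactor(HddsProtos.ReplicationFactor.THREE)
        .setReplicationType(HddsProtos.ReplicationType.RATIS)
        .setOwner("OZONE")
        .build();

    System.out.println(request);
  }
}
```
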
diff --git a/hadoop-hdds/common/src/main/proto/hdds.proto b/hadoop-hdds/common/src/main/proto/hdds.proto
new file mode 100644
index 0000000..0b650b4
--- /dev/null
+++ b/hadoop-hdds/common/src/main/proto/hdds.proto
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and unstable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for a *unstable* .proto interface.
+ */
+
+option java_package = "org.apache.hadoop.hdds.protocol.proto";
+option java_outer_classname = "HddsProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+package hadoop.hdds;
+
+message DatanodeDetailsProto {
+    // TODO: make the port a separate proto message and use it here
+    required string uuid = 1;  // UUID assigned to the Datanode.
+    required string ipAddress = 2;     // IP address
+    required string hostName = 3;      // hostname
+    optional uint32 containerPort = 4 [default = 0];  // Ozone stand_alone protocol
+    optional uint32 ratisPort = 5 [default = 0];      //Ozone ratis port
+    optional uint32 ozoneRestPort = 6 [default = 0];
+}
+
+message PipelineChannel {
+    required string leaderID = 1;
+    repeated DatanodeDetailsProto members = 2;
+    optional LifeCycleState state = 3 [default = OPEN];
+    optional ReplicationType type = 4 [default = STAND_ALONE];
+    optional ReplicationFactor factor = 5 [default = ONE];
+    optional string name = 6;
+}
+
+// A pipeline is composed of PipelineChannel (Ratis/StandAlone) that back a
+// container.
+message Pipeline {
+    required string containerName = 1;
+    required PipelineChannel pipelineChannel = 2;
+}
+
+message KeyValue {
+    required string key = 1;
+    optional string value = 2;
+}
+
+/**
+ * Type of the node.
+ */
+enum NodeType {
+    KSM = 1;
+    SCM = 2;
+    DATANODE = 3;
+}
+
+// Should we rename NodeState to DatanodeState?
+/**
+ * Enum that represents the Node State. This is used in calls to getNodeList
+ * and getNodeCount.
+ */
+enum NodeState {
+    HEALTHY             = 1;
+    STALE               = 2;
+    DEAD                = 3;
+    DECOMMISSIONING     = 4;
+    DECOMMISSIONED      = 5;
+    RAFT_MEMBER         = 6;
+    FREE_NODE           = 7; // Not a member in raft.
+    INVALID             = 8;
+}
+
+enum QueryScope {
+    CLUSTER = 1;
+    POOL = 2;
+}
+
+message Node {
+    required DatanodeDetailsProto nodeID = 1;
+    repeated NodeState nodeStates = 2;
+}
+
+message NodePool {
+    repeated Node nodes = 1;
+}
+
+/**
+ * LifeCycleState for SCM object creation state machine:
+ *    ->Allocated: allocated on SCM but the client has not started creating it yet.
+ *    ->Creating: allocated and assigned to a client to create, but not ack-ed yet.
+ *    ->Open: allocated on SCM, created on datanodes and ack-ed by a client.
+ *    ->Close: container closed because its space is used up or due to an error.
+ *    ->Timeout: container failed to be created on datanodes or ack-ed by the client.
+ *    ->Deleting(TBD): container will be deleted after a timeout.
+ * 1. ALLOCATE-ed containers on SCM can't serve key/block related operations
+ *    until ACK-ed explicitly, which changes the state to OPEN.
+ * 2. Only OPEN/CLOSED containers can serve key/block related operations.
+ * 3. ALLOCATE-ed containers that are not ACK-ed in time will be TIMEOUT-ed and
+ *    CLEANUP-ed asynchronously.
+ */
+
+enum LifeCycleState {
+    ALLOCATED = 1;
+    CREATING = 2; // Used for container allocated/created by different client.
+    OPEN = 3; // Mostly an update to SCM via HB or client call.
+    CLOSING = 4;
+    CLOSED = 5; // !!State after this has not been used yet.
+    DELETING = 6;
+    DELETED = 7; // object is deleted.
+}
+
+enum LifeCycleEvent {
+    CREATE = 1; // A request to client to create this object
+    CREATED = 2;
+    FINALIZE = 3;
+    CLOSE = 4; // !!Event after this has not been used yet.
+    UPDATE = 5;
+    TIMEOUT = 6; // creation has timed out from SCM's View.
+    DELETE = 7;
+    CLEANUP = 8;
+}
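
To make the life-cycle comment above concrete, here is a minimal, illustrative transition table in Java. It only encodes the transitions that the comment and the two enums imply (for example ALLOCATED moving to CREATING on CREATE, and a CREATING container that times out heading towards cleanup); it is a sketch built on assumptions, not SCM's actual state manager, and using the generated HddsProtos enums assumes protoc has been run on this file.

// Sketch only: the transitions below are assumptions derived from the
// LifeCycleState comment above, not the authoritative SCM implementation.
import java.util.EnumMap;
import java.util.Map;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;

public final class LifeCycleSketch {
  private static final Map<LifeCycleState, Map<LifeCycleEvent, LifeCycleState>>
      TRANSITIONS = new EnumMap<>(LifeCycleState.class);

  static {
    add(LifeCycleState.ALLOCATED, LifeCycleEvent.CREATE, LifeCycleState.CREATING);
    add(LifeCycleState.CREATING, LifeCycleEvent.CREATED, LifeCycleState.OPEN);
    add(LifeCycleState.CREATING, LifeCycleEvent.TIMEOUT, LifeCycleState.DELETING);
    add(LifeCycleState.OPEN, LifeCycleEvent.FINALIZE, LifeCycleState.CLOSING);
    add(LifeCycleState.CLOSING, LifeCycleEvent.CLOSE, LifeCycleState.CLOSED);
    add(LifeCycleState.DELETING, LifeCycleEvent.CLEANUP, LifeCycleState.DELETED);
  }

  private static void add(LifeCycleState from, LifeCycleEvent event,
      LifeCycleState to) {
    TRANSITIONS.computeIfAbsent(from, s -> new EnumMap<>(LifeCycleEvent.class))
        .put(event, to);
  }

  /** Returns the next state, or null if the event is not valid in 'from'. */
  public static LifeCycleState next(LifeCycleState from, LifeCycleEvent event) {
    Map<LifeCycleEvent, LifeCycleState> byEvent = TRANSITIONS.get(from);
    return byEvent == null ? null : byEvent.get(event);
  }

  private LifeCycleSketch() {
  }
}
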
+
+message SCMContainerInfo {
+    // TODO : Remove the container name from pipeline.
+    required string containerName = 1;
+    required LifeCycleState state = 2;
+    required Pipeline pipeline = 3;
+    // This is not the total size of the container, but the space allocated by
+    // SCM for clients to write blocks.
+    required uint64 allocatedBytes = 4;
+    required uint64 usedBytes = 5;
+    required uint64 numberOfKeys = 6;
+    optional int64 stateEnterTime = 7;
+    required string owner = 8;
+    required int64 containerID = 9;
+}
+
+message GetScmInfoRequestProto {
+}
+
+message GetScmInfoRespsonseProto {
+    required string clusterId = 1;
+    required string scmId = 2;
+}
+
+
+enum ReplicationType {
+    RATIS = 1;
+    STAND_ALONE = 2;
+    CHAINED = 3;
+}
+
+enum ReplicationFactor {
+    ONE = 1;
+    THREE = 3;
+}
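
As a quick orientation for readers of this .proto, the sketch below shows how the messages above could be assembled from Java through the standard protobuf-java builder API once HddsProtos has been generated. The setter names follow protoc's usual mapping of the field names; the UUID, ports and container name are hypothetical values, and this is not code from the patch itself.

// Sketch only: assumes the generated HddsProtos classes are on the classpath.
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

public final class HddsProtosUsageSketch {
  public static HddsProtos.Pipeline samplePipeline() {
    // A single (hypothetical) datanode; uuid/ipAddress/hostName are required.
    HddsProtos.DatanodeDetailsProto dn = HddsProtos.DatanodeDetailsProto
        .newBuilder()
        .setUuid("11111111-2222-3333-4444-555555555555")
        .setIpAddress("127.0.0.1")
        .setHostName("localhost")
        .setContainerPort(9859)
        .setRatisPort(9858)
        .build();

    // A stand-alone, single-replica channel led by that datanode.
    HddsProtos.PipelineChannel channel = HddsProtos.PipelineChannel
        .newBuilder()
        .setLeaderID(dn.getUuid())
        .addMembers(dn)
        .setType(HddsProtos.ReplicationType.STAND_ALONE)
        .setFactor(HddsProtos.ReplicationFactor.ONE)
        .build();

    // A pipeline backing one (hypothetical) container.
    return HddsProtos.Pipeline.newBuilder()
        .setContainerName("demo-container")
        .setPipelineChannel(channel)
        .build();
  }

  private HddsProtosUsageSketch() {
  }
}
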
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml
new file mode 100644
index 0000000..cb0ab18
--- /dev/null
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -0,0 +1,1049 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!-- Do not modify this file directly.  Instead, copy entries that you -->
+<!-- wish to modify from this file into ozone-site.xml and change them -->
+<!-- there.  If ozone-site.xml does not already exist, create it.      -->
+
+<!--Tags supported are OZONE, CBLOCK, MANAGEMENT, SECURITY, PERFORMANCE,   -->
+<!--DEBUG, CLIENT, SERVER, KSM, SCM, CRITICAL, RATIS, CONTAINER, REQUIRED, -->
+<!--REST, STORAGE, PIPELINE, STANDALONE                                    -->
+
+<configuration>
+
+  <!--Container Settings used by Datanode-->
+  <property>
+    <name>ozone.container.cache.size</name>
+    <value>1024</value>
+    <tag>PERFORMANCE, CONTAINER, STORAGE</tag>
+    <description>The open container is cached on the data node side. We
+      maintain an LRU cache for caching the recently used containers. This
+      setting controls the size of that cache.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ipc</name>
+    <value>9859</value>
+    <tag>OZONE, CONTAINER, MANAGEMENT</tag>
+    <description>The IPC port number of the container.</description>
+  </property>
+  <property>
+    <name>dfs.container.ipc.random.port</name>
+    <value>false</value>
+    <tag>OZONE, DEBUG, CONTAINER</tag>
+    <description>Allocates a random free port for the ozone container. This is
+      used only while running unit tests.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.datanode.storage.dir</name>
+    <value/>
+    <tag>OZONE, CONTAINER, STORAGE, MANAGEMENT, RATIS</tag>
+    <description>This directory is used for storing Ratis metadata like logs.
+      If this is not set then the default metadata dirs are used. A warning
+      will be logged if this is not set. Ideally, this should be mapped to a
+      fast disk like an SSD.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.enabled</name>
+    <value>false</value>
+    <tag>OZONE, MANAGEMENT, PIPELINE, RATIS</tag>
+    <description>Ozone supports different kinds of replication pipelines.
+      Ratis is one of the replication pipelines supported by ozone.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.ipc</name>
+    <value>9858</value>
+    <tag>OZONE, CONTAINER, PIPELINE, RATIS, MANAGEMENT</tag>
+    <description>The Ratis IPC port number of the container.</description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.ipc.random.port</name>
+    <value>false</value>
+    <tag>OZONE,DEBUG</tag>
+    <description>Allocates a random free port for the ozone ratis port of the
+      container. This is used only while running unit tests.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.rpc.type</name>
+    <value>GRPC</value>
+    <tag>OZONE, RATIS, MANAGEMENT</tag>
+    <description>Ratis supports different kinds of transports like netty, GRPC,
+      Hadoop RPC etc. This picks one of those for this cluster.
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.num.write.chunk.threads</name>
+    <value>60</value>
+    <tag>OZONE, RATIS, PERFORMANCE</tag>
+    <description>Maximum number of threads in the thread pool that Ratis
+      will use for writing chunks (60 by default).
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.segment.size</name>
+    <value>1073741824</value>
+    <tag>OZONE, RATIS, PERFORMANCE</tag>
+    <description>The size of the raft segment used by Apache Ratis on datanodes.
+      (1 GB by default)
+    </description>
+  </property>
+  <property>
+    <name>dfs.container.ratis.segment.preallocated.size</name>
+    <value>134217728</value>
+    <tag>OZONE, RATIS, PERFORMANCE</tag>
+    <description>The size of the buffer which is preallocated for raft segment
+      used by Apache Ratis on datanodes.(128 MB by default)
+    </description>
+  </property>
+  <property>
+    <name>ozone.container.report.interval</name>
+    <value>60000ms</value>
+    <tag>OZONE, CONTAINER, MANAGEMENT</tag>
+    <description>Time interval at which the datanode sends container reports.
+      Each datanode periodically sends a container report upon receiving
+      sendContainerReport from SCM. The unit can be defined with a
+      postfix (ns,ms,s,m,h,d).</description>
+  </property>
+  <!--Ozone Settings-->
+  <property>
+    <name>ozone.administrators</name>
+    <value/>
+    <tag>OZONE, SECURITY</tag>
+    <description>Ozone administrator users delimited by commas.
+      If not set, only the user who launches an ozone service will be the admin
+      user. This property must be set if ozone services are started by different
+      users. Otherwise, the RPC layer will reject calls from other servers which
+      are started by users not in the list.
+    </description>
+  </property>
+  <property>
+    <name>ozone.block.deleting.container.limit.per.interval</name>
+    <value>10</value>
+    <tag>OZONE, PERFORMANCE, SCM</tag>
+    <description>The maximum number of containers to be scanned by the block
+      deleting service per time interval. The block deleting service spawns a
+      thread to handle block deletions in a container. This property is used
+      to throttle the number of threads spawned for block deletions.
+    </description>
+  </property>
+  <property>
+    <name>ozone.block.deleting.limit.per.task</name>
+    <value>1000</value>
+    <tag>OZONE, PERFORMANCE, SCM</tag>
+    <description>The maximum number of blocks to be deleted by the block
+      deleting service per time interval. This property is used to throttle
+      the actual number of block deletions on a data node per container.
+    </description>
+  </property>
+  <property>
+    <name>ozone.block.deleting.service.interval</name>
+    <value>1m</value>
+    <tag>OZONE, PERFORMANCE, SCM</tag>
+    <description>Time interval of the block deleting service.
+      The block deleting service runs on each datanode periodically and
+      deletes blocks queued for deletion. Unit could be defined with
+      postfix (ns,ms,s,m,h,d)
+    </description>
+  </property>
+  <property>
+    <name>ozone.block.deleting.service.timeout</name>
+    <value>300000ms</value>
+    <tag>OZONE, PERFORMANCE, SCM</tag>
+    <description>A timeout value for the block deletion service. If this is set
+      greater than 0, the service will stop waiting for block deletion
+      completion after this time. If timeouts happen for a large proportion of
+      block deletions, this needs to be increased together with
+      ozone.block.deleting.limit.per.task. This setting supports multiple time
+      unit suffixes as described in dfs.heartbeat.interval. If no suffix is
+      specified, then milliseconds is assumed.
+    </description>
+  </property>
+  <property>
+    <name>ozone.client.connection.timeout</name>
+    <value>5000ms</value>
+    <tag>OZONE, PERFORMANCE, CLIENT</tag>
+    <description>Connection timeout for Ozone client in milliseconds.
+    </description>
+  </property>
+  <property>
+    <name>ozone.client.protocol</name>
+    <value>org.apache.hadoop.ozone.client.rpc.RpcClient</value>
+    <tag>OZONE, CLIENT, MANAGEMENT</tag>
+    <description>Protocol class to be used by the client to connect to the
+      ozone cluster.
+      The built-in implementations include:
+      org.apache.hadoop.ozone.client.rpc.RpcClient for RPC
+      org.apache.hadoop.ozone.client.rest.RestClient for REST
+      The default is the RpcClient. Please do not change this unless you have a
+      very good understanding of what you are doing.
+    </description>
+  </property>
+  <property>
+    <name>ozone.client.socket.timeout</name>
+    <value>5000ms</value>
+    <tag>OZONE, CLIENT</tag>
+    <description>Socket timeout for Ozone client. Unit could be defined with
+      postfix (ns,ms,s,m,h,d)</description>
+  </property>
+  <property>
+    <name>ozone.enabled</name>
+    <value>false</value>
+    <tag>OZONE, REQUIRED</tag>
+    <description>
+      Determines whether the Ozone Object Storage service is enabled.
+      Set to true to enable Ozone.
+      Set to false to disable Ozone.
+      Unless this value is set to true, Ozone services will not be started in
+      the cluster.
+
+      Please note: By default ozone is disabled on a hadoop cluster.
+    </description>
+  </property>
+  <property>
+    <name>ozone.handler.type</name>
+    <value>distributed</value>
+    <tag>OZONE, REST</tag>
+    <description>
+      Tells ozone which storage handler to use. The possible values are:
+      distributed - The Ozone distributed storage handler, which speaks to
+      KSM/SCM on the backend and provides REST services to clients.
+      local - Local Storage handler strictly for testing - To be removed.
+    </description>
+  </property>
+  <property>
+    <name>ozone.key.deleting.limit.per.task</name>
+    <value>1000</value>
+    <tag>KSM, PERFORMANCE</tag>
+    <description>
+      The maximum number of keys to be scanned by the key deleting service
+      per time interval in KSM. Those keys are sent to delete metadata and
+      generate transactions in SCM for next async deletion between SCM
+      and DataNode.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.address</name>
+    <value/>
+    <tag>KSM, REQUIRED</tag>
+    <description>
+      The address of the Ozone KSM service. This allows clients to discover
+      the KSM's address.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.group.rights</name>
+    <value>READ_WRITE</value>
+    <tag>KSM, SECURITY</tag>
+    <description>
+      Default group permissions in Ozone KSM.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.handler.count.key</name>
+    <value>20</value>
+    <tag>KSM, PERFORMANCE</tag>
+    <description>
+      The number of RPC handler threads for KSM service endpoints.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.http-address</name>
+    <value>0.0.0.0:9874</value>
+    <tag>KSM, MANAGEMENT</tag>
+    <description>
+      The address and the base port where the KSM web UI will listen on.
+
+      If the port is 0, then the server will start on a free port. However, it
+      is best to specify a well-known port, so it is easy to connect and see
+      the KSM management UI.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.http-bind-host</name>
+    <value>0.0.0.0</value>
+    <tag>KSM, MANAGEMENT</tag>
+    <description>
+      The actual address the KSM web server will bind to. If this optional
+      address is set, it overrides only the hostname portion of
+      ozone.ksm.http-address.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.http.enabled</name>
+    <value>true</value>
+    <tag>KSM, MANAGEMENT</tag>
+    <description>
+      Property to enable or disable KSM web user interface.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.https-address</name>
+    <value>0.0.0.0:9875</value>
+    <tag>KSM, MANAGEMENT, SECURITY</tag>
+    <description>
+      The address and the base port where the KSM web UI will listen
+      on using HTTPS.
+      If the port is 0 then the server will start on a free port.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.https-bind-host</name>
+    <value>0.0.0.0</value>
+    <tag>KSM, MANAGEMENT, SECURITY</tag>
+    <description>
+      The actual address the KSM web server will bind to using HTTPS.
+      If this optional address is set, it overrides only the hostname portion of
+      ozone.ksm.https-address.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.keytab.file</name>
+    <value/>
+    <tag>KSM, SECURITY</tag>
+    <description>
+      The keytab file for Kerberos authentication in KSM.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.db.cache.size.mb</name>
+    <value>128</value>
+    <tag>KSM, PERFORMANCE</tag>
+    <description>
+      The size of the KSM DB cache in MB that is used for caching files.
+      This value is set to an abnormally low value in the default configuration.
+      That is to make unit testing easy. Generally, this value should be set to
+      something like 16GB or more, if you intend to use Ozone at scale.
+
+      A large value for this key allows a proportionally larger amount of KSM
+      metadata to be cached in memory. This makes KSM operations faster.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.user.max.volume</name>
+    <value>1024</value>
+    <tag>KSM, MANAGEMENT</tag>
+    <description>
+      The maximum number of volumes a user can have on a cluster. Increasing or
+      decreasing this number has no real impact on the ozone cluster. This is
+      defined only for operational purposes. Only an administrator can create a
+      volume; once a volume is created, there are no restrictions on the number
+      of buckets or keys a user can create inside each bucket.
+    </description>
+  </property>
+  <property>
+    <name>ozone.ksm.user.rights</name>
+    <value>READ_WRITE</value>
+    <tag>KSM, SECURITY</tag>
+    <description>
+      Default user permissions used in KSM.
+    </description>
+  </property>
+  <property>
+    <name>ozone.localstorage.root</name>
+    <value>${hadoop.tmp.dir}/ozone</value>
+    <tag>OZONE, DEBUG</tag>
+    <description>
+      This is used only for testing purposes. This value is used by the local
+      storage handler to simulate a REST backend. This is useful only when
+      debugging the REST front end independent of KSM and SCM. To be removed.
+    </description>
+  </property>
+  <property>
+    <name>ozone.metadata.dirs</name>
+    <value/>
+    <tag>OZONE, KSM, SCM, CONTAINER, REQUIRED, STORAGE</tag>
+    <description>
+      Ozone metadata is shared among KSM, which acts as the namespace
+      manager for ozone, SCM, which acts as the block manager, and data nodes,
+      which maintain the name of the key (Key Name and BlockIDs). This
+      replicated and distributed metadata store is maintained under the
+      directory pointed by this key. Since metadata can be I/O intensive, at
+      least on KSM and SCM we recommend having SSDs. If you have the luxury
+      of mapping this path to SSDs on all machines in the cluster, that will
+      be excellent.
+
+      If Ratis metadata directories are not specified, Ratis server will emit a
+      warning and use this path for storing its metadata too.
+    </description>
+  </property>
+  <property>
+    <name>ozone.metastore.impl</name>
+    <value>RocksDB</value>
+    <tag>OZONE, KSM, SCM, CONTAINER, STORAGE</tag>
+    <description>
+      Ozone metadata store implementation. Ozone metadata is distributed
+      across multiple services such as ksm and scm, and is stored in
+      local key-value databases. This property determines which database
+      library to use. The supported values are LevelDB and RocksDB.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.metastore.rocksdb.statistics</name>
+    <value>ALL</value>
+    <tag>OZONE, KSM, SCM, STORAGE, PERFORMANCE</tag>
+    <description>
+      The statistics level of the rocksdb store. If you use any value from
+      org.rocksdb.StatsLevel (e.g. ALL or EXCEPT_DETAILED_TIMERS), the rocksdb
+      statistics will be exposed over a JMX bean with the chosen setting. Set
+      it to OFF to not initialize rocksdb statistics at all. Please note that
+      collecting statistics could incur a 5-10% performance penalty.
+      Check the rocksdb documentation for more details.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.scm.block.client.address</name>
+    <value/>
+    <tag>OZONE, SCM</tag>
+    <description>The address of the Ozone SCM block client service. If not
+      defined, the value of ozone.scm.client.address is used.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.block.client.bind.host</name>
+    <value>0.0.0.0</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      The hostname or IP address used by the SCM block client
+      endpoint to bind.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.block.client.port</name>
+    <value>9863</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      The port number of the Ozone SCM block client service.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.block.deletion.max.retry</name>
+    <value>4096</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      SCM wraps up many blocks in a deletion transaction and sends that to data
+      node for physical deletion periodically. This property determines how many
+      times SCM is going to retry sending a deletion operation to the data node.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.block.size.in.mb</name>
+    <value>256</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      The default size of an SCM block in MB. This maps to the default
+      Ozone block size.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.chunk.size</name>
+    <value>16777216</value>
+    <tag>OZONE, SCM, CONTAINER, PERFORMANCE</tag>
+    <description>
+      The chunk size for reading/writing chunk operations in bytes.
+
+      The chunk size defaults to 16MB. If the configured value is more than the
+      maximum size (16MB), it will be reset to the maximum size. This maps to
+      the network packet sizes and file write operations in the client to
+      datanode protocol.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.client.address</name>
+    <value/>
+    <tag>OZONE, SCM, REQUIRED</tag>
+    <description>
+      The address of the Ozone SCM client service. This is a required setting.
+
+      It is a string in the host:port format. The port number is optional
+      and defaults to 9860.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.client.bind.host</name>
+    <value>0.0.0.0</value>
+    <tag>OZONE, SCM, MANAGEMENT</tag>
+    <description>The hostname or IP address used by the SCM client endpoint to
+      bind.
+      This setting is used by the SCM only and never used by clients.
+
+      The setting can be useful in multi-homed setups to restrict the
+      availability of the SCM client service to a specific interface.
+
+      The default is appropriate for most clusters.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.client.port</name>
+    <value>9860</value>
+    <tag>OZONE, SCM, MANAGEMENT</tag>
+    <description>The port number of the Ozone SCM client service.</description>
+  </property>
+  <property>
+    <name>ozone.scm.container.deletion-choosing.policy</name>
+    <value>
+      org.apache.hadoop.ozone.container.common.impl.TopNOrderedContainerDeletionChoosingPolicy
+    </value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The policy used for choosing the desired containers for block deletion.
+      The datanode selects some containers to process block deletion
+      in a certain interval defined by ozone.block.deleting.service.interval.
+      The number of containers to process in each interval is defined
+      by ozone.block.deleting.container.limit.per.interval. This property is
+      used to configure the policy applied while selecting containers.
+      There are two policies supported now:
+      RandomContainerDeletionChoosingPolicy and
+      TopNOrderedContainerDeletionChoosingPolicy.
+      org.apache.hadoop.ozone.container.common.impl.RandomContainerDeletionChoosingPolicy
+      implements a simple random policy that returns a random list of
+      containers.
+      org.apache.hadoop.ozone.container.common.impl.TopNOrderedContainerDeletionChoosingPolicy
+      implements a policy that chooses the top N containers ordered by their
+      number of pending deletion blocks, in descending order.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.placement.impl</name>
+    <value>
+      org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementRandom
+    </value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>Placement policy class for containers.
+      Defaults to SCMContainerPlacementRandom.class
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.provision_batch_size</name>
+    <value>20</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>Pre-provision specified number of containers for block
+      allocation.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.report.processing.interval</name>
+    <value>60s</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>Time interval for scm to process container reports
+      for a node pool. Scm handles node pool reports in a cyclic clock
+      manner; it fetches pools periodically at this time interval.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.reports.wait.timeout</name>
+    <value>300s</value>
+    <tag>OZONE, PERFORMANCE, MANAGEMENT</tag>
+    <description>Maximum time to wait in seconds for processing all container
+      reports from a node pool. It determines the timeout for a node pool
+      report.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.size.gb</name>
+    <value>5</value>
+    <tag>OZONE, PERFORMANCE, MANAGEMENT</tag>
+    <description>
+      Default container size used by Ozone. This value is specified in GB.
+      There are two considerations while picking this number: the speed at
+      which a container can be replicated, determined by the network speed,
+      and the metadata that each container generates. Selecting a large size
+      creates less SCM metadata, but recovery time will be longer. 5GB is a
+      size that maps to quick replication times on gigabit networks, while
+      still balancing the amount of metadata.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.datanode.address</name>
+    <value/>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The address of the Ozone SCM service used for internal
+      communication between the DataNodes and the SCM.
+
+      It is a string in the host:port format. The port number is optional
+      and defaults to 9861.
+
+      This setting is optional. If unspecified then the hostname portion
+      is picked from the ozone.scm.client.address setting and the
+      default service port of 9861 is chosen.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.datanode.bind.host</name>
+    <value/>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The hostname or IP address used by the SCM service endpoint to
+      bind.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.datanode.id</name>
+    <value/>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>The path that datanodes will use to store the datanode ID.
+      If this value is not set, then the datanode ID is created under the
+      metadata directory.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.datanode.port</name>
+    <value>9861</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The port number of the Ozone SCM service.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.db.cache.size.mb</name>
+    <value>128</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>SCM keeps track of the containers in the cluster. This DB holds
+      the container metadata. This value is set to a small value to make unit
+      testing run smoothly. In production, we recommend a value of 16GB or
+      higher. This allows SCM to avoid disk I/O while looking up the container
+      location.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.dead.node.interval</name>
+    <value>10m</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The interval between heartbeats before a node is tagged as dead.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.handler.count.key</name>
+    <value>10</value>
+    <tag>OZONE, MANAGEMENT, PERFORMANCE</tag>
+    <description>
+      The number of RPC handler threads for each SCM service
+      endpoint.
+
+      The default is appropriate for small clusters (tens of nodes).
+
+      Set a value that is appropriate for the cluster size. Generally, HDFS
+      recommends that the RPC handler count be set to 20 * log2(Cluster Size),
+      with an upper limit of 200. However, SCM will not have the same amount of
+      traffic as the Namenode, so a much smaller value will work well too.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.heartbeat.interval</name>
+    <value>30s</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The heartbeat interval from a data node to SCM. Yes,
+      it is not three but 30, since most data nodes will be heartbeating via
+      Ratis heartbeats. If a client is not able to talk to a data node, it will
+      notify KSM/SCM eventually. So a 30 second HB seems to work. This assumes
+      that the replication strategy used is Ratis; if not, this value should be
+      set to something smaller, like 3 seconds.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.heartbeat.log.warn.interval.count</name>
+    <value>10</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      Defines how frequently we will log a missed heartbeat to SCM.
+      For example, in the default case we will write a warning message for
+      every ten consecutive heartbeats missed to SCM. This helps in reducing
+      clutter in a data node log, but the trade-off is that the logs will have
+      fewer of these statements.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.heartbeat.rpc-timeout</name>
+    <value>1000</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      Timeout value for the RPC from Datanode to SCM in milliseconds.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.heartbeat.thread.interval</name>
+    <value>3s</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      When a heartbeat from the data node arrives at SCM, it is queued for
+      processing with the time stamp of when the heartbeat arrived. There is a
+      heartbeat processing thread inside SCM that runs at a specified interval.
+      This value controls how frequently this thread is run.
+
+      There are some assumptions built into SCM, such as this value should
+      allow the heartbeat processing thread to run at least three times more
+      frequently than heartbeats and at least five times more than the stale
+      node detection time. If you specify a wrong value, SCM will gracefully
+      refuse to run. For more info, look at the node manager tests in SCM.
+
+      In short, you don't need to change this.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.http-address</name>
+    <value>0.0.0.0:9876</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The address and the base port where the SCM web ui will listen on.
+
+      If the port is 0 then the server will start on a free port.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.http-bind-host</name>
+    <value>0.0.0.0</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The actual address the SCM web server will bind to. If this
+      optional address is set, it overrides only the hostname portion of
+      ozone.scm.http-address.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.http.enabled</name>
+    <value>true</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      Property to enable or disable SCM web ui.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.https-address</name>
+    <value>0.0.0.0:9877</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The address and the base port where the SCM web UI will listen
+      on using HTTPS.
+
+      If the port is 0 then the server will start on a free port.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.https-bind-host</name>
+    <value>0.0.0.0</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The actual address the SCM web server will bind to using HTTPS.
+      If this optional address is set, it overrides only the hostname portion of
+      ozone.scm.https-address.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.keytab.file</name>
+    <value/>
+    <tag>OZONE, SECURITY</tag>
+    <description>
+      The keytab file for Kerberos authentication in SCM.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.max.container.report.threads</name>
+    <value>100</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>
+      Maximum number of threads to process container reports in scm.
+      Each container report from a data node is processed by scm in a worker
+      thread, fetched from a thread pool. This property is used to control the
+      maximum size of the thread pool.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.max.hb.count.to.process</name>
+    <value>5000</value>
+    <tag>OZONE, MANAGEMENT, PERFORMANCE</tag>
+    <description>
+      The maximum number of heartbeats to process per loop of the
+      heartbeat processing thread. Please see
+      ozone.scm.heartbeat.thread.interval
+      for more info.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.max.nodepool.processing.threads</name>
+    <value>1</value>
+    <tag>OZONE, MANAGEMENT, PERFORMANCE</tag>
+    <description>
+      Number of node pools to process in parallel.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.names</name>
+    <value/>
+    <tag>OZONE</tag>
+    <description>
+      The value of this property is a set of DNS | DNS:PORT | IP
+      Address | IP:PORT, written as a comma-separated string, e.g. scm1,
+      scm2:8020, 7.7.7.7:7777.
+      This property allows datanodes to discover where SCM is, so that
+      datanodes can send heartbeat to SCM.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.stale.node.interval</name>
+    <value>90s</value>
+    <tag>OZONE, MANAGEMENT</tag>
+    <description>
+      The interval for stale node flagging. Please
+      see ozone.scm.heartbeat.thread.interval before changing this value.
+    </description>
+  </property>
+  <property>
+    <name>ozone.trace.enabled</name>
+    <value>false</value>
+    <tag>OZONE, DEBUG</tag>
+    <description>
+      Setting this flag to true dumps the HTTP request/response in
+      the logs. Very useful when debugging the REST protocol.
+    </description>
+  </property>
+  <property>
+    <name>ozone.web.authentication.kerberos.principal</name>
+    <value/>
+    <tag>OZONE, SECURITY</tag>
+    <description>
+      The server principal used by the SCM and KSM for web UI SPNEGO
+      authentication when Kerberos security is enabled. This is typically set to
+      HTTP/_HOST@REALM.TLD. The SPNEGO server principal begins with the prefix
+      HTTP/ by convention.
+
+      If the value is '*', the web server will attempt to login with
+      every principal specified in the keytab file.
+    </description>
+  </property>
+
+  <!--Client Settings-->
+  <property>
+    <name>scm.container.client.idle.threshold</name>
+    <value>10s</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>
+      In the standalone pipelines, the SCM clients use netty to
+      communicate with the container. They also use connection pooling to
+      reduce client-side overhead. This setting allows a connection to stay
+      idle for a while before it is closed.
+    </description>
+  </property>
+  <property>
+    <name>scm.container.client.max.size</name>
+    <value>256</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>
+      Controls the maximum number of connections that we cache via client
+      connection pooling. If the number of connections exceeds this count,
+      the oldest idle connection is evicted.
+    </description>
+  </property>
+
+  <property>
+    <name>scm.container.client.max.outstanding.requests</name>
+    <value>100</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>
+      Controls the maximum number of outstanding async requests that can be
+      handled by the Standalone as well as Ratis client.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.scm.container.creation.lease.timeout</name>
+    <value>60s</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      Container creation timeout to be used by SCM. When the BEGIN_CREATE
+      event happens, the container is moved from the ALLOCATED to the CREATING
+      state. SCM will then wait for the configured amount of time to get the
+      COMPLETE_CREATE event; if it doesn't receive it, it will move the
+      container to DELETING.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.key.preallocation.maxsize</name>
+    <value>134217728</value>
+    <tag>OZONE, KSM, PERFORMANCE</tag>
+    <description>
+      When a new key write request is sent to KSM, if a size is requested, at
+      most 128MB is allocated at request time. If the client needs more space
+      for the write, separate block allocation requests will be made.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.client.list.cache</name>
+    <value>1000</value>
+    <tag>OZONE, PERFORMANCE</tag>
+    <description>
+      Configures the cache size used for client list calls.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.replication</name>
+    <value>3</value>
+    <tag>OZONE, CLIENT</tag>
+    <description>
+      Default replication value. The actual number of replications can be
+      specified when writing the key. The default is used if replication
+      is not specified. Supported values: 1 and 3.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.replication.type</name>
+    <value>RATIS</value>
+    <tag>OZONE, CLIENT</tag>
+    <description>
+      Default replication type to be used while writing key into ozone. The
+      value can be specified when writing the key; the default is used when
+      nothing is specified. Supported values: RATIS, STAND_ALONE and CHAINED.
+    </description>
+  </property>
+  <property>
+    <name>ozone.scm.container.close.threshold</name>
+    <value>0.9f</value>
+    <tag>OZONE, SCM</tag>
+    <description>
+      This determines the threshold to be used for closing a container.
+      When the container used percentage reaches this threshold,
+      the container will be closed. Value should be a positive, non-zero
+      percentage in float notation (X.Yf), with 1.0f meaning 100%.
+    </description>
+  </property>
+  <property>
+    <name>ozone.rest.client.http.connection.max</name>
+    <value>100</value>
+    <tag>OZONE, CLIENT</tag>
+    <description>
+      This defines the overall connection limit for the connection pool used in
+      RestClient.
+    </description>
+  </property>
+  <property>
+    <name>ozone.rest.client.http.connection.per-route.max</name>
+    <value>20</value>
+    <tag>OZONE, CLIENT</tag>
+    <description>
+      This defines the connection limit per one HTTP route/host. Total max
+      connection is limited by ozone.rest.client.http.connection.max property.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.open.key.cleanup.service.interval.seconds</name>
+    <value>86400</value>
+    <tag>OZONE, KSM, PERFORMANCE</tag>
+    <description>
+      A background job periodically checks open key entries and deletes the expired ones. This entry controls the
+      interval of this cleanup check.
+    </description>
+  </property>
+
+  <property>
+    <name>ozone.open.key.expire.threshold</name>
+    <value>86400</value>
+    <tag>OZONE, KSM, PERFORMANCE</tag>
+    <description>
+      Controls how long an open key operation is considered active. Specifically, if a key
+      has been open longer than the value of this config entry, that open key
+      is considered expired (e.g. due to a client crash). Defaults to 24 hours.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.custom.tags</name>
+    <value>OZONE,MANAGEMENT,SECURITY,PERFORMANCE,DEBUG,CLIENT,SERVER,KSM,SCM,CRITICAL,RATIS,CONTAINER,REQUIRED,REST,STORAGE,PIPELINE,STANDALONE</value>
+  </property>
+
+  <property>
+    <name>ozone.system.tags</name>
+    <value>OZONE,MANAGEMENT,SECURITY,PERFORMANCE,DEBUG,CLIENT,SERVER,KSM,SCM,CRITICAL,RATIS,CONTAINER,REQUIRED,REST,STORAGE,PIPELINE,STANDALONE</value>
+  </property>
+
+
+  <property>
+    <name>hdds.rest.rest-csrf.enabled</name>
+    <value>false</value>
+    <description>
+      If true, then enables Object Store REST server protection against
+      cross-site request forgery (CSRF).
+    </description>
+  </property>
+
+  <property>
+    <name>hdds.rest.http-address</name>
+    <value>0.0.0.0:9880</value>
+    <description>The http address of Object Store REST server inside the
+      datanode.</description>
+  </property>
+
+
+  <property>
+    <name>hdds.rest.netty.high.watermark</name>
+    <value>65535</value>
+    <description>
+      High watermark configuration to Netty for Object Store REST server.
+    </description>
+  </property>
+
+  <property>
+    <name>hdds.rest.netty.low.watermark</name>
+    <value>32768</value>
+    <description>
+      Low watermark configuration to Netty for Object Store REST server.
+    </description>
+  </property>
+
+  <property>
+    <name>hdds.datanode.plugins</name>
+    <value>org.apache.hadoop.ozone.web.OzoneHddsDatanodeService</value>
+    <description>
+      Comma-separated list of HDDS datanode plug-ins to be activated when
+      HDDS service starts as part of datanode.
+    </description>
+  </property>
+
+</configuration>
\ No newline at end of file
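
Since every property above is plain Hadoop configuration, a short hedged sketch of how these defaults might be read follows; it uses OzoneConfiguration (the same class the tests later in this patch import) and the literal key strings from the file above. Whether ozone-default.xml is picked up automatically depends on it being on the classpath, which is an assumption here.

// Sketch only: reads a few of the defaults defined above via Hadoop's
// Configuration API. The key strings are copied from ozone-default.xml;
// the fallback values mirror the defaults in the file.
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;

public final class OzoneDefaultsSketch {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();

    boolean ozoneEnabled = conf.getBoolean("ozone.enabled", false);
    int containerSizeGb = conf.getInt("ozone.scm.container.size.gb", 5);
    long heartbeatSeconds = conf.getTimeDuration(
        "ozone.scm.heartbeat.interval", 30, TimeUnit.SECONDS);
    String metastoreImpl = conf.get("ozone.metastore.impl", "RocksDB");

    System.out.println("ozone.enabled = " + ozoneEnabled);
    System.out.println("container size (GB) = " + containerSizeGb);
    System.out.println("heartbeat interval (s) = " + heartbeatSeconds);
    System.out.println("metastore impl = " + metastoreImpl);
  }
}
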
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..7966941
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+/**
+ Test cases for SCM client classes.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java
new file mode 100644
index 0000000..6b26b60
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestMetadataStore.java
@@ -0,0 +1,414 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Test class for ozone metadata store.
+ */
+@RunWith(Parameterized.class)
+public class TestMetadataStore {
+
+  private final String storeImpl;
+
+  public TestMetadataStore(String metadataImpl) {
+    this.storeImpl = metadataImpl;
+  }
+
+  @Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(new Object[][] {
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
+    });
+  }
+
+  private MetadataStore store;
+  private File testDir;
+  private final static int MAX_GETRANGE_LENGTH = 100;
+
+  @Rule
+  public ExpectedException expectedException = ExpectedException.none();
+
+  @Before
+  public void init() throws IOException {
+    testDir = GenericTestUtils.getTestDir(getClass().getSimpleName()
+        + "-" + storeImpl.toLowerCase());
+
+    Configuration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, storeImpl);
+
+    store = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setCreateIfMissing(true)
+        .setDbFile(testDir)
+        .build();
+
+    // Add 20 entries.
+    // {a0 : a-value0} to {a9 : a-value9}
+    // {b0 : b-value0} to {b9 : b-value9}
+    for (int i=0; i<10; i++) {
+      store.put(getBytes("a" + i), getBytes("a-value" + i));
+      store.put(getBytes("b" + i), getBytes("b-value" + i));
+    }
+  }
+
+  @After
+  public void cleanup() throws IOException {
+    store.close();
+    store.destroy();
+    FileUtils.deleteDirectory(testDir);
+  }
+
+  private byte[] getBytes(String str) {
+    return str == null ? null :
+        DFSUtilClient.string2Bytes(str);
+  }
+
+  private String getString(byte[] bytes) {
+    return bytes == null ? null :
+        DFSUtilClient.bytes2String(bytes);
+  }
+
+  @Test
+  public void testGetDelete() throws IOException {
+    for (int i=0; i<10; i++) {
+      byte[] va = store.get(getBytes("a" + i));
+      Assert.assertEquals("a-value" + i, getString(va));
+
+      byte[] vb = store.get(getBytes("b" + i));
+      Assert.assertEquals("b-value" + i, getString(vb));
+    }
+
+    String keyToDel = "del-" + UUID.randomUUID().toString();
+    store.put(getBytes(keyToDel), getBytes(keyToDel));
+    Assert.assertEquals(keyToDel, getString(store.get(getBytes(keyToDel))));
+    store.delete(getBytes(keyToDel));
+    Assert.assertEquals(null, store.get(getBytes(keyToDel)));
+  }
+
+  @Test
+  public void testPeekFrom() throws IOException {
+    // Test peek from an element that has prev as well as next
+    testPeek("a3", "a2", "a4");
+
+    // Test peek from an element that only has prev
+    testPeek("b9", "b8", null);
+
+    // Test peek from an element that only has next
+    testPeek("a0", null, "a1");
+  }
+
+  private String getExpectedValue(String key) {
+    if (key == null) {
+      return null;
+    }
+    char[] arr = key.toCharArray();
+    return new StringBuffer().append(arr[0]).append("-value")
+        .append(arr[arr.length - 1]).toString();
+  }
+
+  private void testPeek(String peekKey, String prevKey, String nextKey)
+      throws IOException {
+    // Look for current
+    String k = null;
+    String v = null;
+    ImmutablePair<byte[], byte[]> current =
+        store.peekAround(0, getBytes(peekKey));
+    if (current != null) {
+      k = getString(current.getKey());
+      v = getString(current.getValue());
+    }
+    Assert.assertEquals(peekKey, k);
+    Assert.assertEquals(v, getExpectedValue(peekKey));
+
+    // Look for prev
+    k = null;
+    v = null;
+    ImmutablePair<byte[], byte[]> prev =
+        store.peekAround(-1, getBytes(peekKey));
+    if (prev != null) {
+      k = getString(prev.getKey());
+      v = getString(prev.getValue());
+    }
+    Assert.assertEquals(prevKey, k);
+    Assert.assertEquals(v, getExpectedValue(prevKey));
+
+    // Look for next
+    k = null;
+    v = null;
+    ImmutablePair<byte[], byte[]> next =
+        store.peekAround(1, getBytes(peekKey));
+    if (next != null) {
+      k = getString(next.getKey());
+      v = getString(next.getValue());
+    }
+    Assert.assertEquals(nextKey, k);
+    Assert.assertEquals(v, getExpectedValue(nextKey));
+  }
+
+  @Test
+  public void testIterateKeys() throws IOException {
+    // iterate keys from b0
+    ArrayList<String> result = Lists.newArrayList();
+    store.iterate(getBytes("b0"), (k, v) -> {
+      // b-value{i}
+      String value = getString(v);
+      char num = value.charAt(value.length() - 1);
+      // each value adds 1
+      int i = Character.getNumericValue(num) + 1;
+      value =  value.substring(0, value.length() - 1) + i;
+      result.add(value);
+      return true;
+    });
+
+    Assert.assertFalse(result.isEmpty());
+    for (int i=0; i<result.size(); i++) {
+      Assert.assertEquals("b-value" + (i+1), result.get(i));
+    }
+
+    // iterate from a non-existent key
+    result.clear();
+    store.iterate(getBytes("xyz"), (k, v) -> {
+      result.add(getString(v));
+      return true;
+    });
+    Assert.assertTrue(result.isEmpty());
+
+    // iterate from the beginning
+    result.clear();
+    store.iterate(null, (k, v) -> {
+      result.add(getString(v));
+      return true;
+    });
+    Assert.assertEquals(20, result.size());
+  }
+
+  @Test
+  public void testGetRangeKVs() throws IOException {
+    List<Map.Entry<byte[], byte[]>> result = null;
+
+    // An empty startKey returns values from the beginning.
+    result = store.getRangeKVs(null, 5);
+    Assert.assertEquals(5, result.size());
+    Assert.assertEquals("a-value2", getString(result.get(2).getValue()));
+
+    // Empty list if startKey doesn't exist.
+    result = store.getRangeKVs(getBytes("a12"), 5);
+    Assert.assertEquals(0, result.size());
+
+    // Returns max available entries after a valid startKey.
+    result = store.getRangeKVs(getBytes("b0"), MAX_GETRANGE_LENGTH);
+    Assert.assertEquals(10, result.size());
+    Assert.assertEquals("b0", getString(result.get(0).getKey()));
+    Assert.assertEquals("b-value0", getString(result.get(0).getValue()));
+    result = store.getRangeKVs(getBytes("b0"), 5);
+    Assert.assertEquals(5, result.size());
+
+    // Both startKey and count are honored.
+    result = store.getRangeKVs(getBytes("a9"), 2);
+    Assert.assertEquals(2, result.size());
+    Assert.assertEquals("a9", getString(result.get(0).getKey()));
+    Assert.assertEquals("a-value9", getString(result.get(0).getValue()));
+    Assert.assertEquals("b0", getString(result.get(1).getKey()));
+    Assert.assertEquals("b-value0", getString(result.get(1).getValue()));
+
+    // Filter keys by prefix.
+    // It should return all "b*" entries.
+    MetadataKeyFilter filter1 = new KeyPrefixFilter("b");
+    result = store.getRangeKVs(null, 100, filter1);
+    Assert.assertEquals(10, result.size());
+    Assert.assertTrue(result.stream().allMatch(entry ->
+        new String(entry.getKey()).startsWith("b")
+    ));
+    Assert.assertEquals(20, filter1.getKeysScannedNum());
+    Assert.assertEquals(10, filter1.getKeysHintedNum());
+    result = store.getRangeKVs(null, 3, filter1);
+    Assert.assertEquals(3, result.size());
+    result = store.getRangeKVs(getBytes("b3"), 1, filter1);
+    Assert.assertEquals("b-value3", getString(result.get(0).getValue()));
+
+    // Define a customized filter that filters keys by suffix.
+    // Returns all "*2" entries.
+    MetadataKeyFilter filter2 = (preKey, currentKey, nextKey)
+        -> getString(currentKey).endsWith("2");
+    result = store.getRangeKVs(null, MAX_GETRANGE_LENGTH, filter2);
+    Assert.assertEquals(2, result.size());
+    Assert.assertEquals("a2", getString(result.get(0).getKey()));
+    Assert.assertEquals("b2", getString(result.get(1).getKey()));
+    result = store.getRangeKVs(null, 1, filter2);
+    Assert.assertEquals(1, result.size());
+    Assert.assertEquals("a2", getString(result.get(0).getKey()));
+
+    // Apply multiple filters.
+    result = store.getRangeKVs(null, MAX_GETRANGE_LENGTH, filter1, filter2);
+    Assert.assertEquals(1, result.size());
+    Assert.assertEquals("b2", getString(result.get(0).getKey()));
+    Assert.assertEquals("b-value2", getString(result.get(0).getValue()));
+
+    // If filter is null, no effect.
+    result = store.getRangeKVs(null, 1, null);
+    Assert.assertEquals(1, result.size());
+    Assert.assertEquals("a0", getString(result.get(0).getKey()));
+  }
+
+  @Test
+  public void testGetSequentialRangeKVs() throws IOException {
+    MetadataKeyFilter suffixFilter = (preKey, currentKey, nextKey)
+        -> DFSUtil.bytes2String(currentKey).endsWith("2");
+    // Supposed to return a2 and b2.
+    List<Map.Entry<byte[], byte[]>> result =
+        store.getRangeKVs(null, MAX_GETRANGE_LENGTH, suffixFilter);
+    Assert.assertEquals(2, result.size());
+    Assert.assertEquals("a2", DFSUtil.bytes2String(result.get(0).getKey()));
+    Assert.assertEquals("b2", DFSUtil.bytes2String(result.get(1).getKey()));
+
+    // Supposed to return just a2, because when it iterates to a3,
+    // the filter no longer matches and it should stop there.
+    result = store.getSequentialRangeKVs(null,
+        MAX_GETRANGE_LENGTH, suffixFilter);
+    Assert.assertEquals(1, result.size());
+    Assert.assertEquals("a2", DFSUtil.bytes2String(result.get(0).getKey()));
+  }
+
+  @Test
+  public void testGetRangeLength() throws IOException {
+    List<Map.Entry<byte[], byte[]>> result = null;
+
+    result = store.getRangeKVs(null, 0);
+    Assert.assertEquals(0, result.size());
+
+    result = store.getRangeKVs(null, 1);
+    Assert.assertEquals(1, result.size());
+
+    // Count less than zero is invalid.
+    expectedException.expect(IllegalArgumentException.class);
+    expectedException.expectMessage("Invalid count given");
+    store.getRangeKVs(null, -1);
+  }
+
+  @Test
+  public void testInvalidStartKey() throws IOException {
+    // If startKey is invalid, the returned list should be empty.
+    List<Map.Entry<byte[], byte[]>> kvs =
+        store.getRangeKVs(getBytes("unknownKey"), MAX_GETRANGE_LENGTH);
+    Assert.assertEquals(kvs.size(), 0);
+  }
+
+  @Test
+  public void testDestroyDB() throws IOException {
+    // create a new DB to test db destroy
+    Configuration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, storeImpl);
+
+    File dbDir = GenericTestUtils.getTestDir(getClass().getSimpleName()
+        + "-" + storeImpl.toLowerCase() + "-toDestroy");
+    MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setCreateIfMissing(true)
+        .setDbFile(dbDir)
+        .build();
+
+    dbStore.put(getBytes("key1"), getBytes("value1"));
+    dbStore.put(getBytes("key2"), getBytes("value2"));
+
+    Assert.assertFalse(dbStore.isEmpty());
+    Assert.assertTrue(dbDir.exists());
+    Assert.assertTrue(dbDir.listFiles().length > 0);
+
+    dbStore.destroy();
+
+    Assert.assertFalse(dbDir.exists());
+  }
+
+  @Test
+  public void testBatchWrite() throws IOException {
+    Configuration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, storeImpl);
+
+    File dbDir = GenericTestUtils.getTestDir(getClass().getSimpleName()
+        + "-" + storeImpl.toLowerCase() + "-batchWrite");
+    MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setCreateIfMissing(true)
+        .setDbFile(dbDir)
+        .build();
+
+    List<String> expectedResult = Lists.newArrayList();
+    for (int i = 0; i<10; i++) {
+      dbStore.put(getBytes("batch-" + i), getBytes("batch-value-" + i));
+      expectedResult.add("batch-" + i);
+    }
+
+    BatchOperation batch = new BatchOperation();
+    batch.delete(getBytes("batch-2"));
+    batch.delete(getBytes("batch-3"));
+    batch.delete(getBytes("batch-4"));
+    batch.put(getBytes("batch-new-2"), getBytes("batch-new-value-2"));
+
+    expectedResult.remove("batch-2");
+    expectedResult.remove("batch-3");
+    expectedResult.remove("batch-4");
+    expectedResult.add("batch-new-2");
+
+    dbStore.writeBatch(batch);
+
+    Iterator<String> it = expectedResult.iterator();
+    AtomicInteger count = new AtomicInteger(0);
+    dbStore.iterate(null, (key, value) -> {
+      count.incrementAndGet();
+      return it.hasNext() && it.next().equals(getString(key));
+    });
+
+    Assert.assertEquals(8, count.get());
+  }
+}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java
new file mode 100644
index 0000000..03c45c5
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/TestOzoneAcls.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * This class tests ACL storage and retrieval in the Ozone store.
+ */
+public class TestOzoneAcls {
+
+  @Test
+  public void testAclParse() {
+    HashMap<String, Boolean> testMatrix;
+    testMatrix = new HashMap<>();
+
+    testMatrix.put("user:bilbo:r", Boolean.TRUE);
+    testMatrix.put("user:bilbo:w", Boolean.TRUE);
+    testMatrix.put("user:bilbo:rw", Boolean.TRUE);
+    testMatrix.put("user:bilbo:wr", Boolean.TRUE);
+    testMatrix.put("    user:bilbo:wr   ", Boolean.TRUE);
+
+
+    // ACL parsing makes no judgment on the validity of
+    // user names; it is up to the userAuth interface
+    // to determine whether a user name is a real name.
+    testMatrix.put(" user:*:rw", Boolean.TRUE);
+    testMatrix.put(" user:~!:rw", Boolean.TRUE);
+
+
+    testMatrix.put("", Boolean.FALSE);
+    testMatrix.put(null, Boolean.FALSE);
+    testMatrix.put(" user:bilbo:", Boolean.FALSE);
+    testMatrix.put(" user:bilbo:rx", Boolean.FALSE);
+    testMatrix.put(" user:bilbo:mk", Boolean.FALSE);
+    testMatrix.put(" user::rw", Boolean.FALSE);
+    testMatrix.put("user11:bilbo:rw", Boolean.FALSE);
+    testMatrix.put(" user:::rw", Boolean.FALSE);
+
+    testMatrix.put(" group:hobbit:r", Boolean.TRUE);
+    testMatrix.put(" group:hobbit:w", Boolean.TRUE);
+    testMatrix.put(" group:hobbit:rw", Boolean.TRUE);
+    testMatrix.put(" group:hobbit:wr", Boolean.TRUE);
+    testMatrix.put(" group:*:rw", Boolean.TRUE);
+    testMatrix.put(" group:~!:rw", Boolean.TRUE);
+
+    testMatrix.put(" group:hobbit:", Boolean.FALSE);
+    testMatrix.put(" group:hobbit:rx", Boolean.FALSE);
+    testMatrix.put(" group:hobbit:mk", Boolean.FALSE);
+    testMatrix.put(" group::", Boolean.FALSE);
+    testMatrix.put(" group::rw", Boolean.FALSE);
+    testMatrix.put(" group22:hobbit:", Boolean.FALSE);
+    testMatrix.put(" group:::rw", Boolean.FALSE);
+
+    testMatrix.put("JUNK group:hobbit:r", Boolean.FALSE);
+    testMatrix.put("JUNK group:hobbit:w", Boolean.FALSE);
+    testMatrix.put("JUNK group:hobbit:rw", Boolean.FALSE);
+    testMatrix.put("JUNK group:hobbit:wr", Boolean.FALSE);
+    testMatrix.put("JUNK group:*:rw", Boolean.FALSE);
+    testMatrix.put("JUNK group:~!:rw", Boolean.FALSE);
+
+    testMatrix.put(" world::r", Boolean.TRUE);
+    testMatrix.put(" world::w", Boolean.TRUE);
+    testMatrix.put(" world::rw", Boolean.TRUE);
+    testMatrix.put(" world::wr", Boolean.TRUE);
+
+    testMatrix.put(" world:bilbo:w", Boolean.FALSE);
+    testMatrix.put(" world:bilbo:rw", Boolean.FALSE);
+
+    Set<String> keys = testMatrix.keySet();
+    for (String key : keys) {
+      if (testMatrix.get(key)) {
+        OzoneAcl.parseAcl(key);
+      } else {
+        try {
+          OzoneAcl.parseAcl(key);
+          // should never get here since parseAcl will throw
+          fail("An exception was expected but did not happen.");
+        } catch (IllegalArgumentException e) {
+          // nothing to do
+        }
+      }
+    }
+  }
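+
+  // Illustrative summary (inferred from the matrix above, not a normative
+  // spec): an ACL string has the form <type>:<name>:<rights>, where <type>
+  // is one of user, group or world, <rights> is one or both of 'r' and 'w',
+  // and the world type takes an empty name, e.g. "world::rw".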
+
+  @Test
+  public void testAclValues() {
+    OzoneAcl acl = OzoneAcl.parseAcl("user:bilbo:rw");
+    assertEquals(acl.getName(), "bilbo");
+    assertEquals(OzoneAcl.OzoneACLRights.READ_WRITE, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.USER, acl.getType());
+
+    acl = OzoneAcl.parseAcl("user:bilbo:wr");
+    assertEquals("bilbo", acl.getName());
+    assertEquals(OzoneAcl.OzoneACLRights.READ_WRITE, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.USER, acl.getType());
+
+    acl = OzoneAcl.parseAcl("user:bilbo:r");
+    assertEquals("bilbo", acl.getName());
+    assertEquals(OzoneAcl.OzoneACLRights.READ, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.USER, acl.getType());
+
+    acl = OzoneAcl.parseAcl("user:bilbo:w");
+    assertEquals("bilbo", acl.getName());
+    assertEquals(OzoneAcl.OzoneACLRights.WRITE, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.USER, acl.getType());
+
+    acl = OzoneAcl.parseAcl("group:hobbit:wr");
+    assertEquals(acl.getName(), "hobbit");
+    assertEquals(OzoneAcl.OzoneACLRights.READ_WRITE, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.GROUP, acl.getType());
+
+    acl = OzoneAcl.parseAcl("world::wr");
+    assertEquals(acl.getName(), "");
+    assertEquals(OzoneAcl.OzoneACLRights.READ_WRITE, acl.getRights());
+    assertEquals(OzoneAcl.OzoneACLType.WORLD, acl.getType());
+  }
+
+}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java
new file mode 100644
index 0000000..c1470bb
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestStateMachine.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.common;
+
+import org.apache.commons.collections.SetUtils;
+import org.apache.hadoop.ozone.common.statemachine
+    .InvalidStateTransitionException;
+import org.apache.hadoop.ozone.common.statemachine.StateMachine;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CLEANUP;
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CLOSED;
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.CREATING;
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.FINAL;
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES.INIT;
+import static org.apache.hadoop.ozone.common.TestStateMachine.STATES
+    .OPERATIONAL;
+
+/**
+ * This class tests the Ozone common state machine.
+ */
+public class TestStateMachine {
+
+  /**
+   * STATES used by the test state machine.
+   */
+  public enum STATES {INIT, CREATING, OPERATIONAL, CLOSED, CLEANUP, FINAL};
+
+  /**
+   * EVENTS used by the test state machine.
+   */
+  public enum EVENTS {ALLOCATE, CREATE, UPDATE, CLOSE, DELETE, TIMEOUT};
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void testStateMachineStates() throws InvalidStateTransitionException {
+    Set<STATES> finals = new HashSet<>();
+    finals.add(FINAL);
+
+    StateMachine<STATES, EVENTS> stateMachine =
+        new StateMachine<>(INIT, finals);
+
+    stateMachine.addTransition(INIT, CREATING, EVENTS.ALLOCATE);
+    stateMachine.addTransition(CREATING, OPERATIONAL, EVENTS.CREATE);
+    stateMachine.addTransition(OPERATIONAL, OPERATIONAL, EVENTS.UPDATE);
+    stateMachine.addTransition(OPERATIONAL, CLEANUP, EVENTS.DELETE);
+    stateMachine.addTransition(OPERATIONAL, CLOSED, EVENTS.CLOSE);
+    stateMachine.addTransition(CREATING, CLEANUP, EVENTS.TIMEOUT);
+
+    // Initial and Final states
+    Assert.assertEquals("Initial State", INIT, stateMachine.getInitialState());
+    Assert.assertTrue("Final States", SetUtils.isEqualSet(finals,
+        stateMachine.getFinalStates()));
+
+    // Valid state transitions
+    Assert.assertEquals("STATE should be OPERATIONAL after being created",
+        OPERATIONAL, stateMachine.getNextState(CREATING, EVENTS.CREATE));
+    Assert.assertEquals("STATE should be OPERATIONAL after being updated",
+        OPERATIONAL, stateMachine.getNextState(OPERATIONAL, EVENTS.UPDATE));
+    Assert.assertEquals("STATE should be CLEANUP after being deleted",
+        CLEANUP, stateMachine.getNextState(OPERATIONAL, EVENTS.DELETE));
+    Assert.assertEquals("STATE should be CLEANUP after being timeout",
+        CLEANUP, stateMachine.getNextState(CREATING, EVENTS.TIMEOUT));
+    Assert.assertEquals("STATE should be CLOSED after being closed",
+        CLOSED, stateMachine.getNextState(OPERATIONAL, EVENTS.CLOSE));
+
+    // Negative cases: invalid transition
+    expectException();
+    stateMachine.getNextState(OPERATIONAL, EVENTS.CREATE);
+
+    expectException();
+    stateMachine.getNextState(CREATING, EVENTS.CLOSE);
+  }
+
+  /**
+   * We expect an InvalidStateTransitionException.
+   */
+  private void expectException() {
+    exception.expect(InvalidStateTransitionException.class);
+    exception.expectMessage("Invalid event");
+  }
+
+}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java
new file mode 100644
index 0000000..517c1a7
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/TestLeaseManager.java
@@ -0,0 +1,374 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * A generic lease management API which can be used if a service
+ * needs any kind of lease management.
+ */
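+
+/*
+ * Minimal usage sketch (illustrative only; MyResource, resource and
+ * handleExpiry are placeholders, the calls mirror the API exercised by the
+ * tests below):
+ *
+ *   LeaseManager<MyResource> manager = new LeaseManager<>(5000);
+ *   manager.start();
+ *   Lease<MyResource> lease = manager.acquire(resource);
+ *   lease.registerCallBack(() -> { handleExpiry(resource); return null; });
+ *   lease.renew(5000);          // extend the lease by another 5 seconds
+ *   manager.release(resource);  // callbacks do not run on explicit release
+ *   manager.shutdown();
+ */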
+
+package org.apache.hadoop.ozone.lease;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Test class to check functionality and consistency of LeaseManager.
+ */
+public class TestLeaseManager {
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Dummy resource on which leases can be acquired.
+   */
+  private final class DummyResource {
+
+    private final String name;
+
+    private DummyResource(String name) {
+      this.name = name;
+    }
+
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if(obj instanceof DummyResource) {
+        return name.equals(((DummyResource) obj).name);
+      }
+      return false;
+    }
+  }
+
+  @Test
+  public void testLeaseAcquireAndRelease() throws LeaseException {
+    // It is assumed that the test execution won't take more than 5 seconds;
+    // if it takes longer, increase the defaultTimeout value of LeaseManager.
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    DummyResource resourceTwo = new DummyResource("two");
+    DummyResource resourceThree = new DummyResource("three");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Lease<DummyResource> leaseTwo = manager.acquire(resourceTwo);
+    Lease<DummyResource> leaseThree = manager.acquire(resourceThree);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertEquals(leaseTwo, manager.get(resourceTwo));
+    Assert.assertEquals(leaseThree, manager.get(resourceThree));
+    Assert.assertFalse(leaseOne.hasExpired());
+    Assert.assertFalse(leaseTwo.hasExpired());
+    Assert.assertFalse(leaseThree.hasExpired());
+    // The releases below should not throw LeaseNotFoundException.
+    manager.release(resourceOne);
+    manager.release(resourceTwo);
+    manager.release(resourceThree);
+    Assert.assertTrue(leaseOne.hasExpired());
+    Assert.assertTrue(leaseTwo.hasExpired());
+    Assert.assertTrue(leaseThree.hasExpired());
+    manager.shutdown();
+  }
+
+  @Test
+  public void testLeaseAlreadyExist() throws LeaseException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    DummyResource resourceTwo = new DummyResource("two");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Lease<DummyResource> leaseTwo = manager.acquire(resourceTwo);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertEquals(leaseTwo, manager.get(resourceTwo));
+
+    exception.expect(LeaseAlreadyExistException.class);
+    exception.expectMessage("Resource: " + resourceOne);
+    manager.acquire(resourceOne);
+
+    manager.release(resourceOne);
+    manager.release(resourceTwo);
+    manager.shutdown();
+  }
+
+  @Test
+  public void testLeaseNotFound() throws LeaseException, InterruptedException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    DummyResource resourceTwo = new DummyResource("two");
+    DummyResource resourceThree = new DummyResource("three");
+
+    //Case 1: lease was never acquired.
+    exception.expect(LeaseNotFoundException.class);
+    exception.expectMessage("Resource: " + resourceOne);
+    manager.get(resourceOne);
+
+    //Case 2: lease is acquired and released.
+    Lease<DummyResource> leaseTwo = manager.acquire(resourceTwo);
+    Assert.assertEquals(leaseTwo, manager.get(resourceTwo));
+    Assert.assertFalse(leaseTwo.hasExpired());
+    manager.release(resourceTwo);
+    Assert.assertTrue(leaseTwo.hasExpired());
+    exception.expect(LeaseNotFoundException.class);
+    exception.expectMessage("Resource: " + resourceTwo);
+    manager.get(resourceTwo);
+
+    //Case 3: lease acquired and timed out.
+    Lease<DummyResource> leaseThree = manager.acquire(resourceThree);
+    Assert.assertEquals(leaseThree, manager.get(resourceThree));
+    Assert.assertFalse(leaseThree.hasExpired());
+    long sleepTime = leaseThree.getRemainingTime() + 1000;
+    try {
+      Thread.sleep(sleepTime);
+    } catch (InterruptedException ex) {
+      // Even if interrupted, we still have to wait till the lease times out.
+      Thread.sleep(sleepTime);
+    }
+    Assert.assertTrue(leaseThree.hasExpired());
+    exception.expect(LeaseNotFoundException.class);
+    exception.expectMessage("Resource: " + resourceThree);
+    manager.get(resourceThree);
+    manager.shutdown();
+  }
+
+  @Test
+  public void testCustomLeaseTimeout() throws LeaseException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    DummyResource resourceTwo = new DummyResource("two");
+    DummyResource resourceThree = new DummyResource("three");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Lease<DummyResource> leaseTwo = manager.acquire(resourceTwo, 10000);
+    Lease<DummyResource> leaseThree = manager.acquire(resourceThree, 50000);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertEquals(leaseTwo, manager.get(resourceTwo));
+    Assert.assertEquals(leaseThree, manager.get(resourceThree));
+    Assert.assertFalse(leaseOne.hasExpired());
+    Assert.assertFalse(leaseTwo.hasExpired());
+    Assert.assertFalse(leaseThree.hasExpired());
+    Assert.assertEquals(5000, leaseOne.getLeaseLifeTime());
+    Assert.assertEquals(10000, leaseTwo.getLeaseLifeTime());
+    Assert.assertEquals(50000, leaseThree.getLeaseLifeTime());
+    // Leases are released as part of shutdown, so there is no need to
+    // release them explicitly here.
+    manager.shutdown();
+  }
+
+  @Test
+  public void testLeaseCallback() throws LeaseException, InterruptedException {
+    Map<DummyResource, String> leaseStatus = new HashMap<>();
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    leaseStatus.put(resourceOne, "lease in use");
+    leaseOne.registerCallBack(() -> {
+      leaseStatus.put(resourceOne, "lease expired");
+      return null;
+    });
+    // wait for lease to expire
+    long sleepTime = leaseOne.getRemainingTime() + 1000;
+    try {
+      Thread.sleep(sleepTime);
+    } catch (InterruptedException ex) {
+      // Even if interrupted, we still have to wait till the lease times out.
+      Thread.sleep(sleepTime);
+    }
+    Assert.assertTrue(leaseOne.hasExpired());
+    exception.expect(LeaseNotFoundException.class);
+    exception.expectMessage("Resource: " + resourceOne);
+    manager.get(resourceOne);
+    // check if callback has been executed
+    Assert.assertEquals("lease expired", leaseStatus.get(resourceOne));
+  }
+
+  @Test
+  public void testCallbackExecutionInCaseOfLeaseRelease()
+      throws LeaseException, InterruptedException {
+    // Callbacks should not be executed in case of lease release
+    Map<DummyResource, String> leaseStatus = new HashMap<>();
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    leaseStatus.put(resourceOne, "lease in use");
+    leaseOne.registerCallBack(() -> {
+      leaseStatus.put(resourceOne, "lease expired");
+      return null;
+    });
+    leaseStatus.put(resourceOne, "lease released");
+    manager.release(resourceOne);
+    Assert.assertTrue(leaseOne.hasExpired());
+    exception.expect(LeaseNotFoundException.class);
+    exception.expectMessage("Resource: " + resourceOne);
+    manager.get(resourceOne);
+    Assert.assertEquals("lease released", leaseStatus.get(resourceOne));
+  }
+
+  @Test
+  public void testLeaseCallbackWithMultipleLeases()
+      throws LeaseException, InterruptedException {
+    Map<DummyResource, String> leaseStatus = new HashMap<>();
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    DummyResource resourceTwo = new DummyResource("two");
+    DummyResource resourceThree = new DummyResource("three");
+    DummyResource resourceFour = new DummyResource("four");
+    DummyResource resourceFive = new DummyResource("five");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Lease<DummyResource> leaseTwo = manager.acquire(resourceTwo);
+    Lease<DummyResource> leaseThree = manager.acquire(resourceThree);
+    Lease<DummyResource> leaseFour = manager.acquire(resourceFour);
+    Lease<DummyResource> leaseFive = manager.acquire(resourceFive);
+    leaseStatus.put(resourceOne, "lease in use");
+    leaseStatus.put(resourceTwo, "lease in use");
+    leaseStatus.put(resourceThree, "lease in use");
+    leaseStatus.put(resourceFour, "lease in use");
+    leaseStatus.put(resourceFive, "lease in use");
+    leaseOne.registerCallBack(() -> {
+      leaseStatus.put(resourceOne, "lease expired");
+      return null;
+    });
+    leaseTwo.registerCallBack(() -> {
+      leaseStatus.put(resourceTwo, "lease expired");
+      return null;
+    });
+    leaseThree.registerCallBack(() -> {
+      leaseStatus.put(resourceThree, "lease expired");
+      return null;
+    });
+    leaseFour.registerCallBack(() -> {
+      leaseStatus.put(resourceFour, "lease expired");
+      return null;
+    });
+    leaseFive.registerCallBack(() -> {
+      leaseStatus.put(resourceFive, "lease expired");
+      return null;
+    });
+
+    // release lease one, two and three
+    leaseStatus.put(resourceOne, "lease released");
+    manager.release(resourceOne);
+    leaseStatus.put(resourceTwo, "lease released");
+    manager.release(resourceTwo);
+    leaseStatus.put(resourceThree, "lease released");
+    manager.release(resourceThree);
+
+    // wait for other leases to expire
+    long sleepTime = leaseFive.getRemainingTime() + 1000;
+
+    try {
+      Thread.sleep(sleepTime);
+    } catch (InterruptedException ex) {
+      // Even if interrupted, we still have to wait till the lease times out.
+      Thread.sleep(sleepTime);
+    }
+    Assert.assertTrue(leaseOne.hasExpired());
+    Assert.assertTrue(leaseTwo.hasExpired());
+    Assert.assertTrue(leaseThree.hasExpired());
+    Assert.assertTrue(leaseFour.hasExpired());
+    Assert.assertTrue(leaseFive.hasExpired());
+
+    Assert.assertEquals("lease released", leaseStatus.get(resourceOne));
+    Assert.assertEquals("lease released", leaseStatus.get(resourceTwo));
+    Assert.assertEquals("lease released", leaseStatus.get(resourceThree));
+    Assert.assertEquals("lease expired", leaseStatus.get(resourceFour));
+    Assert.assertEquals("lease expired", leaseStatus.get(resourceFive));
+    manager.shutdown();
+  }
+
+  @Test
+  public void testReuseReleasedLease() throws LeaseException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertFalse(leaseOne.hasExpired());
+
+    manager.release(resourceOne);
+    Assert.assertTrue(leaseOne.hasExpired());
+
+    Lease<DummyResource> sameResourceLease = manager.acquire(resourceOne);
+    Assert.assertEquals(sameResourceLease, manager.get(resourceOne));
+    Assert.assertFalse(sameResourceLease.hasExpired());
+
+    manager.release(resourceOne);
+    Assert.assertTrue(sameResourceLease.hasExpired());
+    manager.shutdown();
+  }
+
+  @Test
+  public void testReuseTimedOutLease()
+      throws LeaseException, InterruptedException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertFalse(leaseOne.hasExpired());
+
+    // wait for lease to expire
+    long sleepTime = leaseOne.getRemainingTime() + 1000;
+    try {
+      Thread.sleep(sleepTime);
+    } catch (InterruptedException ex) {
+      // Even if interrupted, we still have to wait till the lease times out.
+      Thread.sleep(sleepTime);
+    }
+    Assert.assertTrue(leaseOne.hasExpired());
+
+    Lease<DummyResource> sameResourceLease = manager.acquire(resourceOne);
+    Assert.assertEquals(sameResourceLease, manager.get(resourceOne));
+    Assert.assertFalse(sameResourceLease.hasExpired());
+
+    manager.release(resourceOne);
+    Assert.assertTrue(sameResourceLease.hasExpired());
+    manager.shutdown();
+  }
+
+  @Test
+  public void testRenewLease() throws LeaseException, InterruptedException {
+    LeaseManager<DummyResource> manager = new LeaseManager<>(5000);
+    manager.start();
+    DummyResource resourceOne = new DummyResource("one");
+    Lease<DummyResource> leaseOne = manager.acquire(resourceOne);
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertFalse(leaseOne.hasExpired());
+
+    // add 5 more seconds to the lease
+    leaseOne.renew(5000);
+
+    Thread.sleep(5000);
+
+    // lease should still be active
+    Assert.assertEquals(leaseOne, manager.get(resourceOne));
+    Assert.assertFalse(leaseOne.hasExpired());
+    manager.release(resourceOne);
+    manager.shutdown();
+  }
+
+}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java
new file mode 100644
index 0000000..1071309
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/lease/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.lease;
+/*
+ This package contains lease management unit test classes.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/package-info.java
new file mode 100644
index 0000000..0030d2e
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+/**
+ * Ozone related test helper classes and tests of common utils.
+ */
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java
new file mode 100644
index 0000000..a7ce60b
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.utils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import javax.management.MBeanServer;
+import java.io.File;
+import java.lang.management.ManagementFactory;
+
+/**
+ * Tests the JMX interface of the RocksDB metadata store implementation.
+ */
+public class TestRocksDBStoreMBean {
+
+  @Test
+  public void testJmxBeans() throws Exception {
+    File testDir =
+        GenericTestUtils.getTestDir(getClass().getSimpleName() + "-withstat");
+
+    Configuration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL,
+        OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB);
+
+    RocksDBStore metadataStore =
+        (RocksDBStore) MetadataStoreBuilder.newBuilder().setConf(conf)
+            .setCreateIfMissing(true).setDbFile(testDir).build();
+
+    for (int i = 0; i < 10; i++) {
+      metadataStore.put("key".getBytes(), "value".getBytes());
+    }
+
+    MBeanServer platformMBeanServer =
+        ManagementFactory.getPlatformMBeanServer();
+    Thread.sleep(2000);
+
+    Object keysWritten = platformMBeanServer
+        .getAttribute(metadataStore.getStatMBeanName(), "NUMBER_KEYS_WRITTEN");
+
+    Assert.assertEquals(10L, keysWritten);
+
+    Object dbWriteAverage = platformMBeanServer
+        .getAttribute(metadataStore.getStatMBeanName(), "DB_WRITE_AVERAGE");
+    Assert.assertTrue((double) dbWriteAverage > 0);
+
+    metadataStore.close();
+
+  }
+
+  @Test()
+  public void testDisabledStat() throws Exception {
+    File testDir = GenericTestUtils
+        .getTestDir(getClass().getSimpleName() + "-withoutstat");
+
+    Configuration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL,
+        OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB);
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS,
+        OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF);
+
+    RocksDBStore metadataStore =
+        (RocksDBStore) MetadataStoreBuilder.newBuilder().setConf(conf)
+            .setCreateIfMissing(true).setDbFile(testDir).build();
+
+    Assert.assertNull(metadataStore.getStatMBeanName());
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/pom.xml b/hadoop-hdds/container-service/pom.xml
new file mode 100644
index 0000000..3dc8470
--- /dev/null
+++ b/hadoop-hdds/container-service/pom.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-hdds-container-service</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache HDDS Container server</description>
+  <name>Apache HDDS Container server</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>2.2.0</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>io.dropwizard.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <goals>
+              <goal>protoc</goal>
+            </goals>
+            <configuration>
+              <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
+              <imports>
+                <param>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdds/common/src/main/proto/
+                </param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>StorageContainerDatanodeProtocol.proto</include>
+                </includes>
+              </source>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java
new file mode 100644
index 0000000..c734d9b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/HddsServerUtil.java
@@ -0,0 +1,325 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.base.Optional;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.InetSocketAddress;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DEADNODE_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DEADNODE_INTERVAL_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_STALENODE_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_STALENODE_INTERVAL_DEFAULT;
+import static org.apache.hadoop.hdds.HddsUtils.*;
+import static org.apache.hadoop.hdds.server.ServerUtils.sanitizeUserArgs;
+
+/**
+ * HDDS stateless helper functions for server-side components.
+ */
+public final class HddsServerUtil {
+
+  private HddsServerUtil() {
+  }
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      HddsServerUtil.class);
+
+  /**
+   * Retrieve the socket address that should be used by DataNodes to connect
+   * to the SCM.
+   *
+   * @param conf
+   * @return Target InetSocketAddress for the SCM service endpoint.
+   */
+  public static InetSocketAddress getScmAddressForDataNodes(
+      Configuration conf) {
+    // We try the following settings in decreasing priority to retrieve the
+    // target host.
+    // - OZONE_SCM_DATANODE_ADDRESS_KEY
+    // - OZONE_SCM_CLIENT_ADDRESS_KEY
+    //
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY,
+        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+
+    if (!host.isPresent()) {
+      throw new IllegalArgumentException(
+          ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY +
+              " must be defined. See" +
+              " https://wiki.apache.org/hadoop/Ozone#Configuration "
+              + "for details on configuring Ozone.");
+    }
+
+    // If no port number is specified then we'll just try the defaultBindPort.
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY);
+
+    InetSocketAddress addr = NetUtils.createSocketAddr(host.get() + ":" +
+        port.or(ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    return addr;
+  }
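+
+  // Illustrative example (hostname and port are placeholders): if
+  // OZONE_SCM_DATANODE_ADDRESS_KEY is unset but OZONE_SCM_CLIENT_ADDRESS_KEY
+  // is set to "scm.example.com:9860", the host is taken from the client
+  // address, while the port falls back to OZONE_SCM_DATANODE_PORT_DEFAULT
+  // because only the datanode address key is consulted for the port.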
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the SCM.
+   *
+   * @param conf
+   * @return Target InetSocketAddress for the SCM client endpoint.
+   */
+  public static InetSocketAddress getScmClientBindAddress(
+      Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY);
+
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(
+        host.or(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT) + ":" +
+            port.or(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT));
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the SCM Block service.
+   *
+   * @param conf
+   * @return Target InetSocketAddress for the SCM block client endpoint.
+   */
+  public static InetSocketAddress getScmBlockClientBindAddress(
+      Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_BIND_HOST_KEY);
+
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(
+        host.or(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_BIND_HOST_DEFAULT) +
+            ":" + port.or(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_PORT_DEFAULT));
+  }
+
+  /**
+   * Retrieve the socket address that should be used by DataNodes to connect
+   * to the SCM.
+   *
+   * @param conf
+   * @return Target InetSocketAddress for the SCM service endpoint.
+   */
+  public static InetSocketAddress getScmDataNodeBindAddress(
+      Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY);
+
+    // If no port number is specified then we'll just try the defaultBindPort.
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(
+        host.or(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_DEFAULT) + ":" +
+            port.or(ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+  }
+
+
+  /**
+   * Returns the interval in which the heartbeat processor thread runs.
+   *
+   * @param conf - Configuration
+   * @return long in Milliseconds.
+   */
+  public static long getScmheartbeatCheckerInterval(Configuration conf) {
+    return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+        ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL_DEFAULT,
+        TimeUnit.MILLISECONDS);
+  }
+
+  /**
+   * Heartbeat Interval - Defines the heartbeat frequency from a datanode to
+   * SCM.
+   *
+   * @param conf - Ozone Config
+   * @return - HB interval in seconds.
+   */
+  public static long getScmHeartbeatInterval(Configuration conf) {
+    return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL,
+        ScmConfigKeys.OZONE_SCM_HEARBEAT_INTERVAL_DEFAULT,
+        TimeUnit.SECONDS);
+  }
+
+  /**
+   * Get the Stale Node interval, which is used by SCM to flag a datanode as
+   * stale, if the heartbeat from that node has been missing for this duration.
+   *
+   * @param conf - Configuration.
+   * @return - Long, Milliseconds to wait before flagging a node as stale.
+   */
+  public static long getStaleNodeInterval(Configuration conf) {
+
+    long staleNodeIntervalMs =
+        conf.getTimeDuration(OZONE_SCM_STALENODE_INTERVAL,
+            OZONE_SCM_STALENODE_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS);
+
+    long heartbeatThreadFrequencyMs = getScmheartbeatCheckerInterval(conf);
+
+    long heartbeatIntervalMs = getScmHeartbeatInterval(conf) * 1000;
+
+
+    // Make sure that StaleNodeInterval is configured way above the frequency
+    // at which we run the heartbeat thread.
+    //
+    // Here we check that staleNodeInterval is at least five times the
+    // frequency at which the heartbeat checker thread runs.
+    try {
+      sanitizeUserArgs(staleNodeIntervalMs, heartbeatThreadFrequencyMs,
+          5, 1000);
+    } catch (IllegalArgumentException ex) {
+      LOG.error("Stale Node Interval is cannot be honored due to " +
+              "mis-configured {}. ex:  {}",
+          OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, ex);
+      throw ex;
+    }
+
+    // Make sure that the stale node interval is greater than the configured
+    // interval at which datanodes send heartbeats.
+    try {
+      sanitizeUserArgs(staleNodeIntervalMs, heartbeatIntervalMs, 3, 1000);
+    } catch (IllegalArgumentException ex) {
+      LOG.error("Stale Node Interval MS is cannot be honored due to " +
+          "mis-configured {}. ex:  {}", OZONE_SCM_HEARTBEAT_INTERVAL, ex);
+      throw ex;
+    }
+    return staleNodeIntervalMs;
+  }
+
+  /**
+   * Gets the interval for dead node flagging. This has to be a value that is
+   * greater than the stale node interval, and by transitivity it is also
+   * greater than the heartbeat interval and the heartbeat processing
+   * interval.
+   *
+   * @param conf - Configuration.
+   * @return - the interval for dead node flagging.
+   */
+  public static long getDeadNodeInterval(Configuration conf) {
+    long staleNodeIntervalMs = getStaleNodeInterval(conf);
+    long deadNodeIntervalMs = conf.getTimeDuration(OZONE_SCM_DEADNODE_INTERVAL,
+        OZONE_SCM_DEADNODE_INTERVAL_DEFAULT,
+        TimeUnit.MILLISECONDS);
+
+    try {
+      // Make sure that the dead node interval is at least twice the stale
+      // node interval, with a max of 1000 times the stale node interval.
+      sanitizeUserArgs(deadNodeIntervalMs, staleNodeIntervalMs, 2, 1000);
+    } catch (IllegalArgumentException ex) {
+      LOG.error("Dead Node Interval MS is cannot be honored due to " +
+          "mis-configured {}. ex:  {}", OZONE_SCM_STALENODE_INTERVAL, ex);
+      throw ex;
+    }
+    return deadNodeIntervalMs;
+  }
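+
+  // Worked example (numbers are illustrative, assuming sanitizeUserArgs
+  // enforces the minimum/maximum factors described in the comments above):
+  // with a heartbeat checker interval of 3s and a heartbeat interval of 30s,
+  // a stale node interval of 90s passes both checks (>= 5 * 3s and
+  // >= 3 * 30s), and a dead node interval of 180s passes >= 2 * 90s.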
+
+  /**
+   * Returns the maximum number of heartbeats to process per loop of the
+   * heartbeat processing thread.
+   * @param conf Configuration
+   * @return - int -- Number of HBs to process
+   */
+  public static int getMaxHBToProcessPerLoop(Configuration conf) {
+    return conf.getInt(ScmConfigKeys.OZONE_SCM_MAX_HB_COUNT_TO_PROCESS,
+        ScmConfigKeys.OZONE_SCM_MAX_HB_COUNT_TO_PROCESS_DEFAULT);
+  }
+
+  /**
+   * Timeout value for the RPC from Datanode to SCM, primarily used for
+   * Heartbeats and container reports.
+   *
+   * @param conf - Ozone Config
+   * @return - Rpc timeout in Milliseconds.
+   */
+  public static long getScmRpcTimeOutInMilliseconds(Configuration conf) {
+    return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_RPC_TIMEOUT,
+        OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS);
+  }
+
+  /**
+   * Log Warn interval.
+   *
+   * @param conf - Ozone Config
+   * @return - Log warn interval.
+   */
+  public static int getLogWarnInterval(Configuration conf) {
+    return conf.getInt(OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT,
+        OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT);
+  }
+
+  /**
+   * Returns the container port.
+   * @param conf - Conf
+   * @return port number.
+   */
+  public static int getContainerPort(Configuration conf) {
+    return conf.getInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+        OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT);
+  }
+
+
+  /**
+   * Returns the map of service addresses for the Ozone SCM. This method is
+   * used by the DataNodes to determine the service instances to connect to.
+   *
+   * @param conf
+   * @return list of SCM service addresses.
+   */
+  public static Map<String, ? extends Map<String, InetSocketAddress>>
+      getScmServiceRpcAddresses(Configuration conf) {
+
+    final Map<String, InetSocketAddress> serviceInstances = new HashMap<>();
+    serviceInstances.put(OZONE_SCM_SERVICE_INSTANCE_ID,
+        getScmAddressForDataNodes(conf));
+
+    final Map<String, Map<String, InetSocketAddress>> services =
+        new HashMap<>();
+    services.put(OZONE_SCM_SERVICE_ID, serviceInstances);
+    return services;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java
new file mode 100644
index 0000000..4e52046
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/VersionInfo.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+/**
+ * This class tracks the versions of SCM.
+ */
+public final class VersionInfo {
+
+  // We will just be normal and use positive counting numbers for versions.
+  private static final VersionInfo[] VERSION_INFOS =
+      {new VersionInfo("First version of SCM", 1)};
+
+
+  public static final String DESCRIPTION_KEY = "Description";
+  private final String description;
+  private final int version;
+
+  /**
+   * Never created outside this class.
+   *
+   * @param description -- description
+   * @param version     -- version number
+   */
+  private VersionInfo(String description, int version) {
+    this.description = description;
+    this.version = version;
+  }
+
+  /**
+   * Returns all versions.
+   *
+   * @return Version info array.
+   */
+  public static VersionInfo[] getAllVersions() {
+    return VERSION_INFOS.clone();
+  }
+
+  /**
+   * Returns the latest version.
+   *
+   * @return versionInfo
+   */
+  public static VersionInfo getLatestVersion() {
+    return VERSION_INFOS[VERSION_INFOS.length - 1];
+  }
+
+  /**
+   * Return description.
+   *
+   * @return String
+   */
+  public String getDescription() {
+    return description;
+  }
+
+  /**
+   * Return the version.
+   *
+   * @return int.
+   */
+  public int getVersion() {
+    return version;
+  }
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..5905468
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
new file mode 100644
index 0000000..fa4187a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .DatanodeStateMachine;
+import org.apache.hadoop.util.ServicePlugin;
+import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.List;
+import java.util.UUID;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.HDDS_DATANODE_PLUGINS_KEY;
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
+/**
+ * Datanode service plugin to start the HDDS container services.
+ */
+public class HddsDatanodeService implements ServicePlugin {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      HddsDatanodeService.class);
+
+
+  private OzoneConfiguration conf;
+  private DatanodeDetails datanodeDetails;
+  private DatanodeStateMachine datanodeStateMachine;
+  private List<ServicePlugin> plugins;
+
+  /**
+   * Default constructor.
+   */
+  public HddsDatanodeService() {
+    this(null);
+  }
+
+  /**
+   * Constructs {@link HddsDatanodeService} using the provided {@code conf}
+   * value.
+   *
+   * @param conf OzoneConfiguration
+   */
+  public HddsDatanodeService(Configuration conf) {
+    if (conf == null) {
+      this.conf = new OzoneConfiguration();
+    } else {
+      this.conf = new OzoneConfiguration(conf);
+    }
+  }
+
+  /**
+   * Starts HddsDatanode services.
+   *
+   * @param service The service instance invoking this method
+   */
+  @Override
+  public void start(Object service) {
+    OzoneConfiguration.activate();
+    if (service instanceof Configurable) {
+      conf = new OzoneConfiguration(((Configurable) service).getConf());
+    }
+    if (HddsUtils.isHddsEnabled(conf)) {
+      try {
+        String hostname = HddsUtils.getHostName(conf);
+        String ip = InetAddress.getByName(hostname).getHostAddress();
+        datanodeDetails = initializeDatanodeDetails();
+        datanodeDetails.setHostName(hostname);
+        datanodeDetails.setIpAddress(ip);
+        datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf);
+        startPlugins();
+        // Starting HDDS Daemons
+        datanodeStateMachine.startDaemon();
+      } catch (IOException e) {
+        throw new RuntimeException("Can't start the HDDS datanode plugin", e);
+      }
+    }
+  }
+
+  /**
+   * Initializes the DatanodeDetails: reads them from the datanode ID file if
+   * it exists, otherwise builds new details with a randomly generated UUID.
+   *
+   * @return DatanodeDetails
+   */
+  private DatanodeDetails initializeDatanodeDetails()
+      throws IOException {
+    String idFilePath = HddsUtils.getDatanodeIdFilePath(conf);
+    if (idFilePath == null || idFilePath.isEmpty()) {
+      LOG.error("A valid file path is needed for config setting {}",
+          ScmConfigKeys.OZONE_SCM_DATANODE_ID);
+      throw new IllegalArgumentException(ScmConfigKeys.OZONE_SCM_DATANODE_ID +
+          " must be defined. See" +
+          " https://wiki.apache.org/hadoop/Ozone#Configuration" +
+          " for details on configuring Ozone.");
+    }
+
+    Preconditions.checkNotNull(idFilePath);
+    File idFile = new File(idFilePath);
+    if (idFile.exists()) {
+      return ContainerUtils.readDatanodeDetailsFrom(idFile);
+    } else {
+      // There is no datanode.id file, this might be the first time datanode
+      // is started.
+      String datanodeUuid = UUID.randomUUID().toString();
+      return DatanodeDetails.newBuilder().setUuid(datanodeUuid).build();
+    }
+  }
+
+  /**
+   * Starts all the service plugins which are configured using
+   * OzoneConfigKeys.HDDS_DATANODE_PLUGINS_KEY.
+   */
+  private void startPlugins() {
+    try {
+      plugins = conf.getInstances(HDDS_DATANODE_PLUGINS_KEY,
+          ServicePlugin.class);
+    } catch (RuntimeException e) {
+      String pluginsValue = conf.get(HDDS_DATANODE_PLUGINS_KEY);
+      LOG.error("Unable to load HDDS DataNode plugins. " +
+          "Specified list of plugins: {}",
+          pluginsValue, e);
+      throw e;
+    }
+    for (ServicePlugin plugin : plugins) {
+      try {
+        plugin.start(this);
+        LOG.info("Started plug-in {}", plugin);
+      } catch (Throwable t) {
+        LOG.warn("ServicePlugin {} could not be started", plugin, t);
+      }
+    }
+  }
+
+  /**
+   * Returns the OzoneConfiguration used by this HddsDatanodeService.
+   *
+   * @return OzoneConfiguration
+   */
+  public OzoneConfiguration getConf() {
+    return conf;
+  }
+
+  /**
+   * Returns the DatanodeDetails if set, or null otherwise.
+   *
+   * @return DatanodeDetails
+   */
+  @VisibleForTesting
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
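+  /**
+   * Returns the DatanodeStateMachine driving this service, or null if the
+   * service has not been started yet. Exposed for testing.
+   */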
+  @VisibleForTesting
+  public DatanodeStateMachine getDatanodeStateMachine() {
+    return datanodeStateMachine;
+  }
+
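+  /**
+   * Blocks until the DatanodeStateMachine daemon exits.
+   */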
+  public void join() {
+    try {
+      datanodeStateMachine.join();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.info("Interrupted during StorageContainerManager join.");
+    }
+  }
+
+  @Override
+  public void stop() {
+    if (plugins != null) {
+      for (ServicePlugin plugin : plugins) {
+        try {
+          plugin.stop();
+          LOG.info("Stopped plug-in {}", plugin);
+        } catch (Throwable t) {
+          LOG.warn("ServicePlugin {} could not be stopped", plugin, t);
+        }
+      }
+    }
+    if (datanodeStateMachine != null) {
+      datanodeStateMachine.stopDaemon();
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (plugins != null) {
+      for (ServicePlugin plugin : plugins) {
+        try {
+          plugin.close();
+        } catch (Throwable t) {
+          LOG.warn("ServicePlugin {} could not be closed", plugin, t);
+        }
+      }
+    }
+  }
+
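+  /**
+   * Creates an HddsDatanodeService instance from the given configuration.
+   *
+   * @param conf Configuration to use; may be null, in which case defaults
+   *             are loaded
+   * @return HddsDatanodeService
+   */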
+  public static HddsDatanodeService createHddsDatanodeService(
+      Configuration conf) {
+    return new HddsDatanodeService(conf);
+  }
+
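+  /**
+   * Command-line entry point: starts the HddsDatanodeService with a fresh
+   * OzoneConfiguration and waits for it to terminate.
+   */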
+  public static void main(String[] args) {
+    try {
+      StringUtils.startupShutdownMessage(HddsDatanodeService.class, args, LOG);
+      HddsDatanodeService hddsDatanodeService =
+          createHddsDatanodeService(new OzoneConfiguration());
+      hddsDatanodeService.start(null);
+      hddsDatanodeService.join();
+    } catch (Throwable e) {
+      LOG.error("Exception in HddsDatanodeService.", e);
+      terminate(1, e);
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java
new file mode 100644
index 0000000..68bf442
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ChunkUtils.java
@@ -0,0 +1,346 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.AsynchronousFileChannel;
+import java.nio.channels.FileLock;
+import java.nio.file.StandardOpenOption;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.concurrent.ExecutionException;
+
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CHECKSUM_MISMATCH;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_NOT_FOUND;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .INVALID_WRITE_SIZE;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .IO_EXCEPTION;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .OVERWRITE_FLAG_REQUIRED;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNABLE_TO_FIND_CHUNK;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNABLE_TO_FIND_DATA_DIR;
+
+/**
+ * Set of utility functions used by the chunk Manager.
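+ *
+ * <p>A typical write/read round trip through these helpers looks roughly
+ * like the following (illustrative sketch only, assuming the caller already
+ * has a Pipeline, a ContainerData and a ChunkInfo):
+ * <pre>
+ *   File chunkFile = ChunkUtils.validateChunk(pipeline, containerData, info);
+ *   ChunkUtils.writeData(chunkFile, info, data);
+ *   ByteBuffer buf = ChunkUtils.readData(chunkFile, info);
+ * </pre>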
+ */
+public final class ChunkUtils {
+
+  /* Never constructed. */
+  private ChunkUtils() {
+  }
+
+  /**
+   * Checks if we are getting a request to overwrite an existing range of
+   * a chunk.
+   *
+   * @param chunkFile - Chunk file on disk
+   * @param chunkInfo - Chunk info describing the requested write
+   * @return true if the write overlaps existing data
+   */
+  public static boolean isOverWriteRequested(File chunkFile, ChunkInfo
+      chunkInfo) {
+
+    if (!chunkFile.exists()) {
+      return false;
+    }
+
+    long offset = chunkInfo.getOffset();
+    return offset < chunkFile.length();
+  }
+
+  /**
+   * Overwrite is permitted if and only if the user explicitly asks for it,
+   * that is, if the chunk metadata contains the key/value pair
+   * [OverWriteRequested, true].
+   *
+   * @param chunkInfo - Chunk info
+   * @return true if the user asks for it.
+   */
+  public static boolean isOverWritePermitted(ChunkInfo chunkInfo) {
+    String overWrite = chunkInfo.getMetadata().get(OzoneConsts.CHUNK_OVERWRITE);
+    return (overWrite != null) &&
+        (!overWrite.isEmpty()) &&
+        (Boolean.valueOf(overWrite));
+  }
+
+  /**
+   * Validates the chunk write request and returns a File object for the
+   * chunk file that the data is expected to be written to.
+   *
+   * @param pipeline - pipeline.
+   * @param data - container data.
+   * @param info - chunk info.
+   * @return File
+   * @throws StorageContainerException
+   */
+  public static File validateChunk(Pipeline pipeline, ContainerData data,
+      ChunkInfo info) throws StorageContainerException {
+
+    Logger log = LoggerFactory.getLogger(ChunkManagerImpl.class);
+
+    File chunkFile = getChunkFile(pipeline, data, info);
+    if (ChunkUtils.isOverWriteRequested(chunkFile, info)) {
+      if (!ChunkUtils.isOverWritePermitted(info)) {
+        log.error("Rejecting write chunk request. Chunk overwrite " +
+            "without explicit request. {}", info.toString());
+        throw new StorageContainerException("Rejecting write chunk request. " +
+            "OverWrite flag required." + info.toString(),
+            OVERWRITE_FLAG_REQUIRED);
+      }
+    }
+    return chunkFile;
+  }
+
+  /**
+   * Validates that the container and its data directory exist, and returns
+   * the path to the chunk file.
+   *
+   * @param pipeline - Pipeline.
+   * @param data - Container Data
+   * @param info - Chunk info
+   * @return - File.
+   * @throws StorageContainerException
+   */
+  public static File getChunkFile(Pipeline pipeline, ContainerData data,
+      ChunkInfo info) throws StorageContainerException {
+
+    Logger log = LoggerFactory.getLogger(ChunkManagerImpl.class);
+    if (data == null) {
+      log.error("Invalid container Name: {}", pipeline.getContainerName());
+      throw new StorageContainerException("Unable to find the container Name:" +
+          " " +
+          pipeline.getContainerName(), CONTAINER_NOT_FOUND);
+    }
+
+    File dataDir = ContainerUtils.getDataDirectory(data).toFile();
+    if (!dataDir.exists()) {
+      log.error("Unable to find the data directory: {}", dataDir);
+      throw new StorageContainerException("Unable to find the data directory:" +
+          " " + dataDir, UNABLE_TO_FIND_DATA_DIR);
+    }
+
+    return dataDir.toPath().resolve(info.getChunkName()).toFile();
+
+  }
+
+  /**
+   * Writes the data buffer to the offset given in the chunk info within the
+   * chunk file.
+   *
+   * @param chunkFile - File to write data to.
+   * @param chunkInfo - Chunk info describing the offset, length and checksum.
+   * @param data - The data buffer.
+   * @throws StorageContainerException
+   */
+  public static void writeData(File chunkFile, ChunkInfo chunkInfo,
+      byte[] data) throws
+      StorageContainerException, ExecutionException, InterruptedException,
+      NoSuchAlgorithmException {
+
+    Logger log = LoggerFactory.getLogger(ChunkManagerImpl.class);
+    if (data.length != chunkInfo.getLen()) {
+      String err = String.format("data array does not match the length " +
+              "specified. DataLen: %d Byte Array: %d",
+          chunkInfo.getLen(), data.length);
+      log.error(err);
+      throw new StorageContainerException(err, INVALID_WRITE_SIZE);
+    }
+
+    AsynchronousFileChannel file = null;
+    FileLock lock = null;
+
+    try {
+      file =
+          AsynchronousFileChannel.open(chunkFile.toPath(),
+              StandardOpenOption.CREATE,
+              StandardOpenOption.WRITE,
+              StandardOpenOption.SPARSE,
+              StandardOpenOption.SYNC);
+      lock = file.lock().get();
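+      // Verify the checksum (when one is supplied) before the data is
+      // written to disk.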
+      if (chunkInfo.getChecksum() != null &&
+          !chunkInfo.getChecksum().isEmpty()) {
+        verifyChecksum(chunkInfo, data, log);
+      }
+      int size = file.write(ByteBuffer.wrap(data), chunkInfo.getOffset()).get();
+      if (size != data.length) {
+        log.error("Invalid write size found. Size:{}  Expected: {} ", size,
+            data.length);
+        throw new StorageContainerException("Invalid write size found. " +
+            "Size: " + size + " Expected: " + data.length, INVALID_WRITE_SIZE);
+      }
+    } catch (IOException e) {
+      throw new StorageContainerException(e, IO_EXCEPTION);
+
+    } finally {
+      if (lock != null) {
+        try {
+          lock.release();
+        } catch (IOException e) {
+          log.error("Unable to release lock ??, Fatal Error.");
+          throw new StorageContainerException(e, CONTAINER_INTERNAL_ERROR);
+
+        }
+      }
+      if (file != null) {
+        try {
+          file.close();
+        } catch (IOException e) {
+          throw new StorageContainerException("Error closing chunk file",
+              e, CONTAINER_INTERNAL_ERROR);
+        }
+      }
+    }
+  }
+
+  /**
+   * Verifies the checksum of a chunk against the data buffer.
+   *
+   * @param chunkInfo - Chunk Info.
+   * @param data - data buffer
+   * @param log - log
+   * @throws NoSuchAlgorithmException
+   * @throws StorageContainerException
+   */
+  private static void verifyChecksum(ChunkInfo chunkInfo, byte[] data, Logger
+      log) throws NoSuchAlgorithmException, StorageContainerException {
+    MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+    sha.update(data);
+    // MessageDigest#digest() resets the digest, so compute the hex string
+    // once and reuse it for the comparison, the log and the exception.
+    String computedChecksum = Hex.encodeHexString(sha.digest());
+    if (!computedChecksum.equals(chunkInfo.getChecksum())) {
+      log.error("Checksum mismatch. Provided: {} , computed: {}",
+          chunkInfo.getChecksum(), computedChecksum);
+      throw new StorageContainerException("Checksum mismatch. Provided: " +
+          chunkInfo.getChecksum() + " , computed: " + computedChecksum,
+          CHECKSUM_MISMATCH);
+    }
+  }
+
+  /**
+   * Reads data from an existing chunk file.
+   *
+   * @param chunkFile - file where data lives.
+   * @param data - chunk definition.
+   * @return ByteBuffer
+   * @throws StorageContainerException
+   * @throws ExecutionException
+   * @throws InterruptedException
+   */
+  public static ByteBuffer readData(File chunkFile, ChunkInfo data) throws
+      StorageContainerException, ExecutionException, InterruptedException,
+      NoSuchAlgorithmException {
+    Logger log = LoggerFactory.getLogger(ChunkManagerImpl.class);
+
+    if (!chunkFile.exists()) {
+      log.error("Unable to find the chunk file. chunk info : {}",
+          data.toString());
+      throw new StorageContainerException("Unable to find the chunk file. " +
+          "chunk info " +
+          data.toString(), UNABLE_TO_FIND_CHUNK);
+    }
+
+    AsynchronousFileChannel file = null;
+    FileLock lock = null;
+    try {
+      file =
+          AsynchronousFileChannel.open(chunkFile.toPath(),
+              StandardOpenOption.READ);
+      lock = file.lock(data.getOffset(), data.getLen(), true).get();
+
+      ByteBuffer buf = ByteBuffer.allocate((int) data.getLen());
+      file.read(buf, data.getOffset()).get();
+
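+      // Verify the data just read against the recorded checksum, if any.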
+      if (data.getChecksum() != null && !data.getChecksum().isEmpty()) {
+        verifyChecksum(data, buf.array(), log);
+      }
+
+      return buf;
+    } catch (IOException e) {
+      throw new StorageContainerException(e, IO_EXCEPTION);
+    } finally {
+      if (lock != null) {
+        try {
+          lock.release();
+        } catch (IOException e) {
+          log.error("I/O error is lock release.");
+        }
+      }
+      if (file != null) {
+        IOUtils.closeStream(file);
+      }
+    }
+  }
+
+  /**
+   * Returns a plain success response for chunk commands whose success
+   * responses carry no payload.
+   *
+   * @param msg Request
+   * @return Response.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getChunkResponse(ContainerProtos.ContainerCommandRequestProto msg) {
+    return ContainerUtils.getContainerResponse(msg);
+  }
+
+  /**
+   * Gets a response to the read chunk calls.
+   *
+   * @param msg - The read chunk request.
+   * @param data - The chunk data that was read.
+   * @param info - Info describing the chunk that was read.
+   * @return Response.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getReadChunkResponse(ContainerProtos.ContainerCommandRequestProto msg,
+      byte[] data, ChunkInfo info) {
+    Preconditions.checkNotNull(msg);
+
+    ContainerProtos.ReadChunkResponseProto.Builder response =
+        ContainerProtos.ReadChunkResponseProto.newBuilder();
+    response.setChunkData(info.getProtoBufMessage());
+    response.setData(ByteString.copyFrom(data));
+    response.setPipeline(msg.getReadChunk().getPipeline());
+
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        ContainerUtils.getContainerResponse(msg, ContainerProtos.Result
+            .SUCCESS, "");
+    builder.setReadChunk(response);
+    return builder.build();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java
new file mode 100644
index 0000000..c29374c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerData.java
@@ -0,0 +1,326 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.util.Time;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * This class maintains the information about a container in the ozone world.
+ * <p>
+ * A container is a name, along with metadata, which is a set of key-value
+ * pairs.
+ */
+public class ContainerData {
+
+  private final String containerName;
+  private final Map<String, String> metadata;
+  private String dbPath;  // Path to Level DB Store.
+  // Path to Physical file system where container and checksum are stored.
+  private String containerFilePath;
+  private String hash;
+  private AtomicLong bytesUsed;
+  private long maxSize;
+  private Long containerID;
+  private HddsProtos.LifeCycleState state;
+
+  /**
+   * Constructs a ContainerData Object.
+   *
+   * @param containerName - Name
+   * @param containerID - ID of the container
+   * @param conf - Configuration used to derive the maximum container size
+   */
+  public ContainerData(String containerName, Long containerID,
+      Configuration conf) {
+    this.metadata = new TreeMap<>();
+    this.containerName = containerName;
+    this.maxSize = conf.getLong(ScmConfigKeys.SCM_CONTAINER_CLIENT_MAX_SIZE_KEY,
+        ScmConfigKeys.SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT) * OzoneConsts.GB;
+    this.bytesUsed =  new AtomicLong(0L);
+    this.containerID = containerID;
+    this.state = HddsProtos.LifeCycleState.OPEN;
+  }
+
+  /**
+   * Constructs a ContainerData object from ProtoBuf classes.
+   *
+   * @param protoData - ProtoBuf Message
+   * @throws IOException
+   */
+  public static ContainerData getFromProtBuf(
+      ContainerProtos.ContainerData protoData, Configuration conf)
+      throws IOException {
+    ContainerData data = new ContainerData(protoData.getName(),
+        protoData.getContainerID(), conf);
+    for (int x = 0; x < protoData.getMetadataCount(); x++) {
+      data.addMetadata(protoData.getMetadata(x).getKey(),
+          protoData.getMetadata(x).getValue());
+    }
+
+    if (protoData.hasContainerPath()) {
+      data.setContainerPath(protoData.getContainerPath());
+    }
+
+    if (protoData.hasDbPath()) {
+      data.setDBPath(protoData.getDbPath());
+    }
+
+    if (protoData.hasState()) {
+      data.setState(protoData.getState());
+    }
+
+    if(protoData.hasHash()) {
+      data.setHash(protoData.getHash());
+    }
+
+    if (protoData.hasBytesUsed()) {
+      data.setBytesUsed(protoData.getBytesUsed());
+    }
+
+    if (protoData.hasSize()) {
+      data.setMaxSize(protoData.getSize());
+    }
+    return data;
+  }
+
+  /**
+   * Returns a ProtoBuf Message from ContainerData.
+   *
+   * @return Protocol Buffer Message
+   */
+  public ContainerProtos.ContainerData getProtoBufMessage() {
+    ContainerProtos.ContainerData.Builder builder = ContainerProtos
+        .ContainerData.newBuilder();
+    builder.setName(this.getContainerName());
+    builder.setContainerID(this.getContainerID());
+
+    if (this.getDBPath() != null) {
+      builder.setDbPath(this.getDBPath());
+    }
+
+    if (this.getHash() != null) {
+      builder.setHash(this.getHash());
+    }
+
+    if (this.getContainerPath() != null) {
+      builder.setContainerPath(this.getContainerPath());
+    }
+
+    builder.setState(this.getState());
+
+    for (Map.Entry<String, String> entry : metadata.entrySet()) {
+      HddsProtos.KeyValue.Builder keyValBuilder =
+          HddsProtos.KeyValue.newBuilder();
+      builder.addMetadata(keyValBuilder.setKey(entry.getKey())
+          .setValue(entry.getValue()).build());
+    }
+
+    if (this.getBytesUsed() >= 0) {
+      builder.setBytesUsed(this.getBytesUsed());
+    }
+
+    if (this.getKeyCount() >= 0) {
+      builder.setKeyCount(this.getKeyCount());
+    }
+
+    if (this.getMaxSize() >= 0) {
+      builder.setSize(this.getMaxSize());
+    }
+
+    return builder.build();
+  }
+
+  /**
+   * Returns the name of the container.
+   *
+   * @return - name
+   */
+  public String getContainerName() {
+    return containerName;
+  }
+
+  /**
+   * Adds metadata.
+   */
+  public void addMetadata(String key, String value) throws IOException {
+    synchronized (this.metadata) {
+      if (this.metadata.containsKey(key)) {
+        throw new IOException("This key already exists. Key " + key);
+      }
+      metadata.put(key, value);
+    }
+  }
+
+  /**
+   * Returns all metadata.
+   */
+  public Map<String, String> getAllMetadata() {
+    synchronized (this.metadata) {
+      return Collections.unmodifiableMap(this.metadata);
+    }
+  }
+
+  /**
+   * Returns value of a key.
+   */
+  public String getValue(String key) {
+    synchronized (this.metadata) {
+      return metadata.get(key);
+    }
+  }
+
+  /**
+   * Deletes a metadata entry from the map.
+   *
+   * @param key - Key
+   */
+  public void deleteKey(String key) {
+    synchronized (this.metadata) {
+      metadata.remove(key);
+    }
+  }
+
+  /**
+   * Returns path.
+   *
+   * @return - path
+   */
+  public String getDBPath() {
+    return dbPath;
+  }
+
+  /**
+   * Sets path.
+   *
+   * @param path - String.
+   */
+  public void setDBPath(String path) {
+    this.dbPath = path;
+  }
+
+  /**
+   * This function serves as the generic key for the ContainerCache class. Both
+   * ContainerData and ContainerKeyData override this function to return the
+   * right name that can be used in ContainerCache.
+   *
+   * @return String Name.
+   */
+  public String getName() {
+    return getContainerName();
+  }
+
+  /**
+   * Get container file path.
+   * @return - Physical path where container file and checksum is stored.
+   */
+  public String getContainerPath() {
+    return containerFilePath;
+  }
+
+  /**
+   * Set container Path.
+   * @param containerPath - File path.
+   */
+  public void setContainerPath(String containerPath) {
+    this.containerFilePath = containerPath;
+  }
+
+  /**
+   * Get container ID.
+   * @return - container ID.
+   */
+  public synchronized Long getContainerID() {
+    return containerID;
+  }
+
+  public synchronized void setState(HddsProtos.LifeCycleState state) {
+    this.state = state;
+  }
+
+  public synchronized HddsProtos.LifeCycleState getState() {
+    return this.state;
+  }
+
+  /**
+   * checks if the container is open.
+   * @return - boolean
+   */
+  public synchronized boolean isOpen() {
+    return HddsProtos.LifeCycleState.OPEN == state;
+  }
+
+  /**
+   * Marks this container as closed.
+   */
+  public synchronized void closeContainer() {
+    // TODO: closed or closing here
+    setState(HddsProtos.LifeCycleState.CLOSED);
+
+    // Simple placeholder for now: hash of the container name plus the
+    // timestamp at which the close container message was received.
+    setHash(DigestUtils.sha256Hex(this.getContainerName() +
+        Long.toString(Time.monotonicNow())));
+  }
+
+  /**
+   * Final hash for this container.
+   * @return - Hash
+   */
+  public String getHash() {
+    return hash;
+  }
+
+  public void setHash(String hash) {
+    this.hash = hash;
+  }
+
+  public void setMaxSize(long maxSize) {
+    this.maxSize = maxSize;
+  }
+
+  public long getMaxSize() {
+    return maxSize;
+  }
+
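+  /**
+   * Returns the number of metadata entries, which is reported as the key
+   * count for this container.
+   */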
+  public long getKeyCount() {
+    return metadata.size();
+  }
+
+  public void setBytesUsed(long used) {
+    this.bytesUsed.set(used);
+  }
+
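+  /**
+   * Adds delta to the bytes used by this container and returns the updated
+   * total.
+   */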
+  public long addBytesUsed(long delta) {
+    return this.bytesUsed.addAndGet(delta);
+  }
+
+  public long getBytesUsed() {
+    return bytesUsed.get();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java
new file mode 100644
index 0000000..d4d732b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerMetrics.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableQuantiles;
+import org.apache.hadoop.metrics2.lib.MutableRate;
+
+/**
+ * This class is for maintaining the various Storage Container
+ * DataNode statistics and publishing them through the metrics interfaces.
+ * This also registers the JMX MBean for RPC.
+ * <p>
+ * This class has a number of metrics variables that are publicly accessible;
+ * these variables (objects) have methods to update their values;
+ * for example:
+ * <p> {@link #numOps}.incr()
+ */
+@InterfaceAudience.Private
+@Metrics(about="Storage Container DataNode Metrics", context="dfs")
+public class ContainerMetrics {
+  @Metric private MutableCounterLong numOps;
+  private MutableCounterLong[] numOpsArray;
+  private MutableCounterLong[] opsBytesArray;
+  private MutableRate[] opsLatency;
+  private MutableQuantiles[][] opsLatQuantiles;
+  private MetricsRegistry registry = null;
+
+  public ContainerMetrics(int[] intervals) {
+    int numEnumEntries = ContainerProtos.Type.values().length;
+    final int len = intervals.length;
+    this.numOpsArray = new MutableCounterLong[numEnumEntries];
+    this.opsBytesArray = new MutableCounterLong[numEnumEntries];
+    this.opsLatency = new MutableRate[numEnumEntries];
+    this.opsLatQuantiles = new MutableQuantiles[numEnumEntries][len];
+    this.registry = new MetricsRegistry("StorageContainerMetrics");
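+    // One counter, bytes counter, rate and quantile set per container
+    // command type; proto enum values start at 1, so index i maps to
+    // ContainerProtos.Type.valueOf(i + 1).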
+    for (int i = 0; i < numEnumEntries; i++) {
+      numOpsArray[i] = registry.newCounter(
+          "num" + ContainerProtos.Type.valueOf(i + 1),
+          "number of " + ContainerProtos.Type.valueOf(i + 1) + " ops",
+          (long) 0);
+      opsBytesArray[i] = registry.newCounter(
+          "bytes" + ContainerProtos.Type.valueOf(i + 1),
+          "bytes used by " + ContainerProtos.Type.valueOf(i + 1) + "op",
+          (long) 0);
+      opsLatency[i] = registry.newRate(
+          "latency" + ContainerProtos.Type.valueOf(i + 1),
+          ContainerProtos.Type.valueOf(i + 1) + " op");
+
+      for (int j = 0; j < len; j++) {
+        int interval = intervals[j];
+        String quantileName = ContainerProtos.Type.valueOf(i + 1) + "Nanos"
+            + interval + "s";
+        opsLatQuantiles[i][j] = registry.newQuantiles(quantileName,
+            "latency of Container ops", "ops", "latency", interval);
+      }
+    }
+  }
+
+  public static ContainerMetrics create(Configuration conf) {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    // Percentile measurement is off by default; quantiles are only tracked
+    // for the configured intervals.
+    int[] intervals =
+             conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);
+    return ms.register("StorageContainerMetrics",
+                       "Storage Container Node Metrics",
+                       new ContainerMetrics(intervals));
+  }
+
+  public void incContainerOpcMetrics(ContainerProtos.Type type){
+    numOps.incr();
+    numOpsArray[type.ordinal()].incr();
+  }
+
+  public long getContainerOpsMetrics(ContainerProtos.Type type){
+    return numOpsArray[type.ordinal()].value();
+  }
+
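+  /**
+   * Records the latency of a container op of the given type, in nanoseconds,
+   * and updates the configured percentile quantiles.
+   */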
+  public void incContainerOpsLatencies(ContainerProtos.Type type,
+                                       long latencyNanos) {
+    opsLatency[type.ordinal()].add(latencyNanos);
+    for (MutableQuantiles q: opsLatQuantiles[type.ordinal()]) {
+      q.add(latencyNanos);
+    }
+  }
+
+  public void incContainerBytesStats(ContainerProtos.Type type, long bytes) {
+    opsBytesArray[type.ordinal()].incr(bytes);
+  }
+
+  public long getContainerBytesMetrics(ContainerProtos.Type type){
+    return opsBytesArray[type.ordinal()].value();
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
new file mode 100644
index 0000000..50d2da3
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo;
+
+/**
+ * Container Report carries the information about a closed container that is
+ * sent to SCM as part of a container report.
+ */
+public class ContainerReport {
+  private static final int UNKNOWN = -1;
+  private final String containerName;
+  private final String finalhash;
+  private long size;
+  private long keyCount;
+  private long bytesUsed;
+  private long readCount;
+  private long writeCount;
+  private long readBytes;
+  private long writeBytes;
+  private long containerID;
+
+  public long getContainerID() {
+    return containerID;
+  }
+
+  public void setContainerID(long containerID) {
+    this.containerID = containerID;
+  }
+
+  /**
+   * Constructs the ContainerReport.
+   *
+   * @param containerName - Container Name.
+   * @param finalhash - Final Hash.
+   */
+  public ContainerReport(String containerName, String finalhash) {
+    this.containerName = containerName;
+    this.finalhash = finalhash;
+    this.size = UNKNOWN;
+    this.keyCount = UNKNOWN;
+    this.bytesUsed = 0L;
+    this.readCount = 0L;
+    this.readBytes = 0L;
+    this.writeCount = 0L;
+    this.writeBytes = 0L;
+  }
+
+  /**
+   * Gets a containerReport from protobuf class.
+   *
+   * @param info - ContainerInfo.
+   * @return - ContainerReport.
+   */
+  public static ContainerReport getFromProtoBuf(ContainerInfo info) {
+    Preconditions.checkNotNull(info);
+    ContainerReport report = new ContainerReport(info.getContainerName(),
+        info.getFinalhash());
+    if (info.hasSize()) {
+      report.setSize(info.getSize());
+    }
+    if (info.hasKeyCount()) {
+      report.setKeyCount(info.getKeyCount());
+    }
+    if (info.hasUsed()) {
+      report.setBytesUsed(info.getUsed());
+    }
+    if (info.hasReadCount()) {
+      report.setReadCount(info.getReadCount());
+    }
+    if (info.hasReadBytes()) {
+      report.setReadBytes(info.getReadBytes());
+    }
+    if (info.hasWriteCount()) {
+      report.setWriteCount(info.getWriteCount());
+    }
+    if (info.hasWriteBytes()) {
+      report.setWriteBytes(info.getWriteBytes());
+    }
+
+    report.setContainerID(info.getContainerID());
+    return report;
+  }
+
+  /**
+   * Gets the container name.
+   *
+   * @return - Name
+   */
+  public String getContainerName() {
+    return containerName;
+  }
+
+  /**
+   * Returns the final signature for this container.
+   *
+   * @return - hash
+   */
+  public String getFinalhash() {
+    return finalhash;
+  }
+
+  /**
+   * Returns a positive number if the size is known, or -1 if not.
+   *
+   * @return size or -1
+   */
+  public long getSize() {
+    return size;
+  }
+
+  /**
+   * Sets the size of the container on disk.
+   *
+   * @param size - long
+   */
+  public void setSize(long size) {
+    this.size = size;
+  }
+
+  /**
+   * Gets number of keys in the container if known.
+   *
+   * @return - Number of keys or -1 for not known.
+   */
+  public long getKeyCount() {
+    return keyCount;
+  }
+
+  /**
+   * Sets the key count.
+   *
+   * @param keyCount - Key Count
+   */
+  public void setKeyCount(long keyCount) {
+    this.keyCount = keyCount;
+  }
+
+  public long getReadCount() {
+    return readCount;
+  }
+
+  public void setReadCount(long readCount) {
+    this.readCount = readCount;
+  }
+
+  public long getWriteCount() {
+    return writeCount;
+  }
+
+  public void setWriteCount(long writeCount) {
+    this.writeCount = writeCount;
+  }
+
+  public long getReadBytes() {
+    return readBytes;
+  }
+
+  public void setReadBytes(long readBytes) {
+    this.readBytes = readBytes;
+  }
+
+  public long getWriteBytes() {
+    return writeBytes;
+  }
+
+  public void setWriteBytes(long writeBytes) {
+    this.writeBytes = writeBytes;
+  }
+
+  public long getBytesUsed() {
+    return bytesUsed;
+  }
+
+  public void setBytesUsed(long bytesUsed) {
+    this.bytesUsed = bytesUsed;
+  }
+
+  /**
+   * Gets a containerInfo protobuf message from ContainerReports.
+   *
+   * @return ContainerInfo
+   */
+  public ContainerInfo getProtoBufMessage() {
+    return ContainerInfo.newBuilder()
+        .setContainerName(this.getContainerName())
+        .setKeyCount(this.getKeyCount())
+        .setSize(this.getSize())
+        .setUsed(this.getBytesUsed())
+        .setReadCount(this.getReadCount())
+        .setReadBytes(this.getReadBytes())
+        .setWriteCount(this.getWriteCount())
+        .setWriteBytes(this.getWriteBytes())
+        .setFinalhash(this.getFinalhash())
+        .setContainerID(this.getContainerID())
+        .build();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
new file mode 100644
index 0000000..1818188
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -0,0 +1,442 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import static org.apache.commons.io.FilenameUtils.removeExtension;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .INVALID_ARGUMENT;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNABLE_TO_FIND_DATA_DIR;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_META;
+
+/**
+ * A set of helper functions to create proper responses and to manage
+ * container files on disk.
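+ *
+ * <p>For example, the datanode ID file helpers round-trip a
+ * {@link org.apache.hadoop.hdds.protocol.DatanodeDetails} as follows
+ * (illustrative sketch only):
+ * <pre>
+ *   ContainerUtils.writeDatanodeDetailsTo(datanodeDetails, idFile);
+ *   DatanodeDetails details = ContainerUtils.readDatanodeDetailsFrom(idFile);
+ * </pre>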
+ */
+public final class ContainerUtils {
+
+  private ContainerUtils() {
+    //never constructed.
+  }
+
+  /**
+   * Returns a CreateContainer Response. This call is used by create and delete
+   * containers which have null success responses.
+   *
+   * @param msg Request
+   * @return Response.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getContainerResponse(ContainerProtos.ContainerCommandRequestProto msg) {
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        getContainerResponse(msg, ContainerProtos.Result.SUCCESS, "");
+    return builder.build();
+  }
+
+  /**
+   * Returns a ReadContainer Response.
+   *
+   * @param msg Request
+   * @param containerData - data
+   * @return Response.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getReadContainerResponse(ContainerProtos.ContainerCommandRequestProto msg,
+      ContainerData containerData) {
+    Preconditions.checkNotNull(containerData);
+
+    ContainerProtos.ReadContainerResponseProto.Builder response =
+        ContainerProtos.ReadContainerResponseProto.newBuilder();
+    response.setContainerData(containerData.getProtoBufMessage());
+
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        getContainerResponse(msg, ContainerProtos.Result.SUCCESS, "");
+    builder.setReadContainer(response);
+    return builder.build();
+  }
+
+  /**
+   * Builds a ContainerCommandResponseProto builder for the given request,
+   * carrying the supplied result code and message.
+   *
+   * @param msg - Protobuf message.
+   * @param result - result
+   * @param message - Error message.
+   * @return ContainerCommandResponseProto.Builder
+   */
+  public static ContainerProtos.ContainerCommandResponseProto.Builder
+      getContainerResponse(ContainerProtos.ContainerCommandRequestProto msg,
+      ContainerProtos.Result result, String message) {
+    return
+        ContainerProtos.ContainerCommandResponseProto.newBuilder()
+            .setCmdType(msg.getCmdType())
+            .setTraceID(msg.getTraceID())
+            .setResult(result)
+            .setMessage(message);
+  }
+
+  /**
+   * Logs the error and returns a response to the caller.
+   *
+   * @param log - Logger
+   * @param ex - Exception
+   * @param msg - Request Object
+   * @return Response
+   */
+  public static ContainerProtos.ContainerCommandResponseProto logAndReturnError(
+      Logger log, StorageContainerException ex,
+      ContainerProtos.ContainerCommandRequestProto msg) {
+    log.info("Operation: {} : Trace ID: {} : Message: {} : Result: {}",
+        msg.getCmdType().name(), msg.getTraceID(),
+        ex.getMessage(), ex.getResult().getValueDescriptor().getName());
+    return getContainerResponse(msg, ex.getResult(), ex.getMessage()).build();
+  }
+
+  /**
+   * Logs the error and returns a response to the caller.
+   *
+   * @param log - Logger
+   * @param ex - Exception
+   * @param msg - Request Object
+   * @return Response
+   */
+  public static ContainerProtos.ContainerCommandResponseProto logAndReturnError(
+      Logger log, RuntimeException ex,
+      ContainerProtos.ContainerCommandRequestProto msg) {
+    log.info("Operation: {} : Trace ID: {} : Message: {} ",
+        msg.getCmdType().name(), msg.getTraceID(), ex.getMessage());
+    return getContainerResponse(msg, INVALID_ARGUMENT, ex.getMessage()).build();
+  }
+
+  /**
+   * We found a command type but no associated payload for the command. Hence
+   * return malformed Command as response.
+   *
+   * @param msg - Protobuf message.
+   * @return ContainerCommandResponseProto - MALFORMED_REQUEST.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      malformedRequest(ContainerProtos.ContainerCommandRequestProto msg) {
+    return getContainerResponse(msg, ContainerProtos.Result.MALFORMED_REQUEST,
+        "Cmd type does not match the payload.").build();
+  }
+
+  /**
+   * We found a command type that is not supported yet.
+   *
+   * @param msg - Protobuf message.
+   * @return ContainerCommandResponseProto - MALFORMED_REQUEST.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      unsupportedRequest(ContainerProtos.ContainerCommandRequestProto msg) {
+    return getContainerResponse(msg, ContainerProtos.Result.UNSUPPORTED_REQUEST,
+        "Server does not support this command yet.").build();
+  }
+
+  /**
+   * Gets the container name from a container file.
+   *
+   * @param containerFile - File
+   * @return Name of the container.
+   */
+  public static String getContainerNameFromFile(File containerFile) {
+    Preconditions.checkNotNull(containerFile);
+    return Paths.get(containerFile.getParent()).resolve(
+        removeExtension(containerFile.getName())).toString();
+  }
+
+  /**
+   * Verifies that this is indeed a new container.
+   *
+   * @param containerFile - Container File to verify
+   * @param metadataFile - metadata File to verify
+   * @throws IOException
+   */
+  public static void verifyIsNewContainer(File containerFile, File metadataFile)
+      throws IOException {
+    Logger log = LoggerFactory.getLogger(ContainerManagerImpl.class);
+    if (containerFile.exists()) {
+      log.error("container already exists on disk. File: {}",
+          containerFile.toPath());
+      throw new FileAlreadyExistsException("container already exists on " +
+          "disk.");
+    }
+
+    if (metadataFile.exists()) {
+      log.error("metadata found on disk, but missing container. Refusing to" +
+          " write this container. File: {} ", metadataFile.toPath());
+      throw new FileAlreadyExistsException(("metadata found on disk, but " +
+          "missing container. Refusing to write this container."));
+    }
+
+    File parentPath = new File(containerFile.getParent());
+
+    if (!parentPath.exists() && !parentPath.mkdirs()) {
+      log.error("Unable to create parent path. Path: {}",
+          parentPath.toString());
+      throw new IOException("Unable to create container directory.");
+    }
+
+    if (!containerFile.createNewFile()) {
+      log.error("creation of a new container file failed. File: {}",
+          containerFile.toPath());
+      throw new IOException("creation of a new container file failed.");
+    }
+
+    if (!metadataFile.createNewFile()) {
+      log.error("creation of the metadata file failed. File: {}",
+          metadataFile.toPath());
+      throw new IOException("creation of a new container file failed.");
+    }
+  }
+
+  public static String getContainerDbFileName(String containerName) {
+    return containerName + OzoneConsts.DN_CONTAINER_DB;
+  }
+
+  /**
+   * Creates a metadata DB for the specified container.
+   *
+   * @param containerPath - Container Path.
+   * @param containerName - Name of the container.
+   * @param conf - Configuration.
+   * @return Path to the metadata directory.
+   * @throws IOException
+   */
+  public static Path createMetadata(Path containerPath, String containerName,
+      Configuration conf)
+      throws IOException {
+    Logger log = LoggerFactory.getLogger(ContainerManagerImpl.class);
+    Preconditions.checkNotNull(containerPath);
+    Path metadataPath = containerPath.resolve(OzoneConsts.CONTAINER_META_PATH);
+    if (!metadataPath.toFile().mkdirs()) {
+      log.error("Unable to create directory for metadata storage. Path: {}",
+          metadataPath);
+      throw new IOException("Unable to create directory for metadata storage." +
+          " Path: " + metadataPath);
+    }
+    MetadataStore store = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setCreateIfMissing(true)
+        .setDbFile(metadataPath
+            .resolve(getContainerDbFileName(containerName)).toFile())
+        .build();
+
+    // we close since the SCM pre-creates containers.
+    // we will open and put Db handle into a cache when keys are being created
+    // in a container.
+
+    store.close();
+
+    Path dataPath = containerPath.resolve(OzoneConsts.CONTAINER_DATA_PATH);
+    if (!dataPath.toFile().mkdirs()) {
+
+      // If we failed to create data directory, we cleanup the
+      // metadata directory completely. That is, we will delete the
+      // whole directory including LevelDB file.
+      log.error("Unable to create directory for data storage. cleaning up the" +
+              " container path: {} dataPath: {}",
+          containerPath, dataPath);
+      FileUtils.deleteDirectory(containerPath.toFile());
+      throw new IOException("Unable to create directory for data storage." +
+          " Path: " + dataPath);
+    }
+    return metadataPath;
+  }
+
+  /**
+   * Returns Metadata location.
+   *
+   * @param containerData - Data
+   * @param location - Path
+   * @return Path
+   */
+  public static File getMetadataFile(ContainerData containerData,
+      Path location) {
+    return location.resolve(containerData
+        .getContainerName().concat(CONTAINER_META))
+        .toFile();
+  }
+
+  /**
+   * Returns container file location.
+   *
+   * @param containerData - Data
+   * @param location - Root path
+   * @return Path
+   */
+  public static File getContainerFile(ContainerData containerData,
+      Path location) {
+    return location.resolve(containerData
+        .getContainerName().concat(CONTAINER_EXTENSION))
+        .toFile();
+  }
+
+  /**
+   * Container metadata directory -- here is where the level DB lives.
+   *
+   * @param cData - cData.
+   * @return Path to the parent directory where the DB lives.
+   */
+  public static Path getMetadataDirectory(ContainerData cData) {
+    Path dbPath = Paths.get(cData.getDBPath());
+    Preconditions.checkNotNull(dbPath);
+    Preconditions.checkState(dbPath.toString().length() > 0);
+    return dbPath.getParent();
+  }
+
+  /**
+   * Returns the path where data or chunks live for a given container.
+   *
+   * @param cData - cData container
+   * @return - Path
+   * @throws StorageContainerException
+   */
+  public static Path getDataDirectory(ContainerData cData)
+      throws StorageContainerException {
+    Path path = getMetadataDirectory(cData);
+    Preconditions.checkNotNull(path);
+    Path parentPath = path.getParent();
+    if (parentPath == null) {
+      throw new StorageContainerException("Unable to get Data directory."
+          + path, UNABLE_TO_FIND_DATA_DIR);
+    }
+    return parentPath.resolve(OzoneConsts.CONTAINER_DATA_PATH);
+  }
+
+  /**
+   * Removes a container. The container must be empty unless forceDelete is
+   * set.
+   * <p/>
+   * There are three things we need to delete.
+   * <p/>
+   * 1. Container file and metadata file. 2. The Level DB file 3. The path that
+   * we created on the data location.
+   *
+   * @param containerData - Data of the container to remove.
+   * @param conf - configuration of the cluster.
+   * @param forceDelete - whether this container should be deleted forcibly.
+   * @throws IOException
+   */
+  public static void removeContainer(ContainerData containerData,
+      Configuration conf, boolean forceDelete) throws IOException {
+    Preconditions.checkNotNull(containerData);
+    Path dbPath = Paths.get(containerData.getDBPath());
+
+    MetadataStore db = KeyUtils.getDB(containerData, conf);
+    // If the container is not empty and cannot be deleted forcibly,
+    // then throw a SCE to stop deleting.
+    if(!forceDelete && !db.isEmpty()) {
+      throw new StorageContainerException(
+          "Container cannot be deleted because it is not empty.",
+          ContainerProtos.Result.ERROR_CONTAINER_NOT_EMPTY);
+    }
+    // Close the DB connection and remove the DB handler from cache
+    KeyUtils.removeDB(containerData, conf);
+
+    // Delete the DB File.
+    FileUtils.forceDelete(dbPath.toFile());
+    dbPath = dbPath.getParent();
+
+    // Delete all metadata in the data directories for this container.
+    if (dbPath != null) {
+      FileUtils.deleteDirectory(dbPath.toFile());
+      dbPath = dbPath.getParent();
+    }
+
+    // now delete the container directory, this means that all key data dirs
+    // will be removed too.
+    if (dbPath != null) {
+      FileUtils.deleteDirectory(dbPath.toFile());
+    }
+
+    // Delete the container metadata from the metadata locations.
+    String rootPath = getContainerNameFromFile(new File(containerData
+        .getContainerPath()));
+    Path containerPath = Paths.get(rootPath.concat(CONTAINER_EXTENSION));
+    Path metaPath = Paths.get(rootPath.concat(CONTAINER_META));
+
+    FileUtils.forceDelete(containerPath.toFile());
+    FileUtils.forceDelete(metaPath.toFile());
+  }
+
+  /**
+   * Persists a {@link DatanodeDetails} to a local file.
+   *
+   * @param datanodeDetails DatanodeDetails to persist
+   * @param path local file to write to
+   * @throws IOException when read/write error occurs
+   */
+  public synchronized static void writeDatanodeDetailsTo(
+      DatanodeDetails datanodeDetails, File path) throws IOException {
+    if (path.exists()) {
+      if (!path.delete() || !path.createNewFile()) {
+        throw new IOException("Unable to overwrite the datanode ID file.");
+      }
+    } else {
+      if(!path.getParentFile().exists() &&
+          !path.getParentFile().mkdirs()) {
+        throw new IOException("Unable to create datanode ID directories.");
+      }
+    }
+    try (FileOutputStream out = new FileOutputStream(path)) {
+      HddsProtos.DatanodeDetailsProto proto =
+          datanodeDetails.getProtoBufMessage();
+      proto.writeTo(out);
+    }
+  }
+
+  /**
+   * Read {@link DatanodeDetails} from a local ID file.
+   *
+   * @param path ID file local path
+   * @return {@link DatanodeDetails}
+   * @throws IOException If the id file is malformed or other I/O exceptions
+   */
+  public synchronized static DatanodeDetails readDatanodeDetailsFrom(File path)
+      throws IOException {
+    if (!path.exists()) {
+      throw new IOException("Datanode ID file not found.");
+    }
+    try(FileInputStream in = new FileInputStream(path)) {
+      return DatanodeDetails.getFromProtoBuf(
+          HddsProtos.DatanodeDetailsProto.parseFrom(in));
+    } catch (IOException e) {
+      throw new IOException("Failed to parse DatanodeDetails from "
+          + path.getAbsolutePath(), e);
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
new file mode 100644
index 0000000..ade162a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.collect.Maps;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * A helper class that summarizes the container blocks that are pending
+ * deletion.
+ */
+public final class DeletedContainerBlocksSummary {
+
+  private final List<DeletedBlocksTransaction> blocks;
+  // key : txID
+  // value : number of times this tx has been processed
+  private final Map<Long, Integer> txSummary;
+  // key : container name
+  // value : number of blocks that need to be deleted in this container;
+  // if the message contains multiple entries for the same container,
+  // the block counts are merged
+  private final Map<String, Integer> blockSummary;
+  // total number of blocks in this message
+  private int numOfBlocks;
+
+  private DeletedContainerBlocksSummary(List<DeletedBlocksTransaction> blocks) {
+    this.blocks = blocks;
+    txSummary = Maps.newHashMap();
+    blockSummary = Maps.newHashMap();
+    blocks.forEach(entry -> {
+      txSummary.put(entry.getTxID(), entry.getCount());
+      if (blockSummary.containsKey(entry.getContainerName())) {
+        blockSummary.put(entry.getContainerName(),
+            blockSummary.get(entry.getContainerName())
+                + entry.getBlockIDCount());
+      } else {
+        blockSummary.put(entry.getContainerName(), entry.getBlockIDCount());
+      }
+      numOfBlocks += entry.getBlockIDCount();
+    });
+  }
+
+  public static DeletedContainerBlocksSummary getFrom(
+      List<DeletedBlocksTransaction> blocks) {
+    return new DeletedContainerBlocksSummary(blocks);
+  }
+
+  public int getNumOfBlocks() {
+    return numOfBlocks;
+  }
+
+  public int getNumOfContainers() {
+    return blockSummary.size();
+  }
+
+  public String getTXIDs() {
+    return String.join(",", txSummary.keySet()
+        .stream().map(String::valueOf).collect(Collectors.toList()));
+  }
+
+  public String getTxIDSummary() {
+    List<String> txSummaryEntry = txSummary.entrySet().stream()
+        .map(entry -> entry.getKey() + "(" + entry.getValue() + ")")
+        .collect(Collectors.toList());
+    return "[" + String.join(",", txSummaryEntry) + "]";
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    for (DeletedBlocksTransaction blks : blocks) {
+      sb.append(" ")
+          .append("TXID=")
+          .append(blks.getTxID())
+          .append(", ")
+          .append("TimesProcessed=")
+          .append(blks.getCount())
+          .append(", ")
+          .append(blks.getContainerName())
+          .append(" : [")
+          .append(String.join(",", blks.getBlockIDList())).append("]")
+          .append("\n");
+    }
+    return sb.toString();
+  }
+}
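
To make the summary behaviour concrete, here is a minimal usage fragment. The
protobuf builder calls (setTxID, setContainerName, addBlockID, setCount) are
inferred from the getters used above and from standard protobuf code
generation, so they should be read as assumptions.

    // Illustrative fragment: summarize two transactions for the same container.
    List<DeletedBlocksTransaction> txs = new ArrayList<>();
    txs.add(DeletedBlocksTransaction.newBuilder()
        .setTxID(1L).setContainerName("c1").addBlockID("b1").setCount(0).build());
    txs.add(DeletedBlocksTransaction.newBuilder()
        .setTxID(2L).setContainerName("c1").addBlockID("b2").setCount(3).build());

    DeletedContainerBlocksSummary summary =
        DeletedContainerBlocksSummary.getFrom(txs);
    // summary.getNumOfBlocks() == 2, summary.getNumOfContainers() == 1,
    // summary.getTxIDSummary() looks like "[1(0),2(3)]" (map-ordering dependent).
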
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java
new file mode 100644
index 0000000..566db02
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/FileUtils.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+
+/**
+ * FileUtils contains helper routines used by the putSmallFile and
+ * getSmallFile RPCs.
+ */
+public final class FileUtils {
+  /**
+   * Never Constructed.
+   */
+  private FileUtils() {
+  }
+
+  /**
+   * Gets a response for the putSmallFile RPC.
+   * @param msg - ContainerCommandRequestProto
+   * @return - ContainerCommandResponseProto
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getPutFileResponse(ContainerProtos.ContainerCommandRequestProto msg) {
+    ContainerProtos.PutSmallFileResponseProto.Builder putFileResponse =
+        ContainerProtos.PutSmallFileResponseProto.newBuilder();
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        ContainerUtils.getContainerResponse(msg, ContainerProtos.Result
+            .SUCCESS, "");
+    builder.setCmdType(ContainerProtos.Type.PutSmallFile);
+    builder.setPutSmallFile(putFileResponse);
+    return builder.build();
+  }
+
+  /**
+   * Gets a response to the read small file call.
+   * @param msg - the getSmallFile request.
+   * @param data - the chunk data read from disk.
+   * @param info - metadata of the chunk that backs the small file.
+   * @return Response carrying the chunk data.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getGetSmallFileResponse(ContainerProtos.ContainerCommandRequestProto msg,
+      byte[] data, ChunkInfo info) {
+    Preconditions.checkNotNull(msg);
+
+    ContainerProtos.ReadChunkResponseProto.Builder readChunkResponse =
+        ContainerProtos.ReadChunkResponseProto.newBuilder();
+    readChunkResponse.setChunkData(info.getProtoBufMessage());
+    readChunkResponse.setData(ByteString.copyFrom(data));
+    readChunkResponse.setPipeline(msg.getGetSmallFile().getKey().getPipeline());
+
+    ContainerProtos.GetSmallFileResponseProto.Builder getSmallFile =
+        ContainerProtos.GetSmallFileResponseProto.newBuilder();
+    getSmallFile.setData(readChunkResponse.build());
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        ContainerUtils.getContainerResponse(msg, ContainerProtos.Result
+            .SUCCESS, "");
+    builder.setCmdType(ContainerProtos.Type.GetSmallFile);
+    builder.setGetSmallFile(getSmallFile);
+    return builder.build();
+  }
+
+}
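
As a usage sketch, this is roughly how a dispatcher could wire these helpers
after servicing a small-file request. The handler method names and the
surrounding flow are hypothetical; only the FileUtils calls come from this
patch.

    // Hypothetical handler fragment: after the chunk backing the small file
    // has been persisted, turn the original request into a success response.
    ContainerProtos.ContainerCommandResponseProto handlePutSmallFile(
        ContainerProtos.ContainerCommandRequestProto request) {
      // ... write the chunk carried by the request via the ChunkManager ...
      return FileUtils.getPutFileResponse(request);
    }

    // Hypothetical handler fragment: data and info would come from the chunk
    // read performed for the requested key.
    ContainerProtos.ContainerCommandResponseProto handleGetSmallFile(
        ContainerProtos.ContainerCommandRequestProto request,
        byte[] data, ChunkInfo info) {
      return FileUtils.getGetSmallFileResponse(request, data, info);
    }
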
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java
new file mode 100644
index 0000000..33eb911
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyUtils.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
+import org.apache.hadoop.utils.MetadataStore;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .NO_SUCH_KEY;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNABLE_TO_READ_METADATA_DB;
+
+/**
+ * Utility functions that support key operations.
+ */
+public final class KeyUtils {
+  public static final String ENCODING_NAME = "UTF-8";
+  public static final Charset ENCODING = Charset.forName(ENCODING_NAME);
+
+  /**
+   * Never Constructed.
+   */
+  private KeyUtils() {
+  }
+
+  /**
+   * Get a DB handler for a given container.
+   * If the handler doesn't exist in cache yet, first create one and
+   * add into cache. This function is called with containerManager
+   * ReadLock held.
+   *
+   * @param container container.
+   * @param conf configuration.
+   * @return MetadataStore handle.
+   * @throws StorageContainerException
+   */
+  public static MetadataStore getDB(ContainerData container,
+      Configuration conf) throws StorageContainerException {
+    Preconditions.checkNotNull(container);
+    ContainerCache cache = ContainerCache.getInstance(conf);
+    Preconditions.checkNotNull(cache);
+    try {
+      return cache.getDB(container.getContainerName(), container.getDBPath());
+    } catch (IOException ex) {
+      String message =
+          String.format("Unable to open DB. DB Name: %s, Path: %s. ex: %s",
+          container.getContainerName(), container.getDBPath(), ex.getMessage());
+      throw new StorageContainerException(message, UNABLE_TO_READ_METADATA_DB);
+    }
+  }
+
+  /**
+   * Remove a DB handler from cache.
+   *
+   * @param container - Container data.
+   * @param conf - Configuration.
+   */
+  public static void removeDB(ContainerData container,
+      Configuration conf) {
+    Preconditions.checkNotNull(container);
+    ContainerCache cache = ContainerCache.getInstance(conf);
+    Preconditions.checkNotNull(cache);
+    cache.removeDB(container.getContainerName());
+  }
+
+  /**
+   * Shutdown all DB Handles.
+   *
+   * @param cache - Cache for DB Handles.
+   */
+  @SuppressWarnings("unchecked")
+  public static void shutdownCache(ContainerCache cache)  {
+    cache.shutdownCache();
+  }
+
+  /**
+   * Returns successful keyResponse.
+   * @param msg - Request.
+   * @return Response.
+   */
+  public static ContainerProtos.ContainerCommandResponseProto
+      getKeyResponse(ContainerProtos.ContainerCommandRequestProto msg) {
+    return ContainerUtils.getContainerResponse(msg);
+  }
+
+
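+  /**
+   * Returns a successful getKey response that carries the requested key data.
+   *
+   * @param msg - getKey request.
+   * @param data - key data read from the container DB.
+   * @return Response with the key data set.
+   */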
+  public static ContainerProtos.ContainerCommandResponseProto
+      getKeyDataResponse(ContainerProtos.ContainerCommandRequestProto msg,
+      KeyData data) {
+    ContainerProtos.GetKeyResponseProto.Builder getKey = ContainerProtos
+        .GetKeyResponseProto.newBuilder();
+    getKey.setKeyData(data.getProtoBufMessage());
+    ContainerProtos.ContainerCommandResponseProto.Builder builder =
+        ContainerUtils.getContainerResponse(msg, ContainerProtos.Result
+            .SUCCESS, "");
+    builder.setGetKey(getKey);
+    return builder.build();
+  }
+
+  /**
+   * Parses the key name from a bytes array.
+   * @param bytes key name in bytes.
+   * @return key name string.
+   */
+  public static String getKeyName(byte[] bytes) {
+    return new String(bytes, ENCODING);
+  }
+
+  /**
+   * Parses the {@link KeyData} from a bytes array.
+   *
+   * @param bytes key data in bytes.
+   * @return key data.
+   * @throws IOException if the bytes array is malformed or invalid.
+   */
+  public static KeyData getKeyData(byte[] bytes) throws IOException {
+    try {
+      ContainerProtos.KeyData kd = ContainerProtos.KeyData.parseFrom(bytes);
+      return KeyData.getFromProtoBuf(kd);
+    } catch (IOException e) {
+      // Preserve the parse failure as the cause for easier debugging.
+      throw new StorageContainerException("Failed to parse key data from the" +
+          " bytes array.", e, NO_SUCH_KEY);
+    }
+  }
+}
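
A minimal sketch of how these helpers compose for a single key lookup. It
assumes MetadataStore exposes a plain get(byte[]) accessor, which is not shown
in this patch.

    // Illustrative fragment: look up one key's metadata in a container's DB.
    MetadataStore store = KeyUtils.getDB(containerData, conf);
    byte[] rawKeyData = store.get(keyName.getBytes(KeyUtils.ENCODING)); // assumed accessor
    if (rawKeyData != null) {
      KeyData keyData = KeyUtils.getKeyData(rawKeyData);
      long size = keyData.getSize();
    }
    // The cached handle is shared; KeyUtils.removeDB(containerData, conf)
    // should only be called when the container itself goes away.
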
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java
new file mode 100644
index 0000000..21f31e1
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+/**
+ Contains protocol buffer helper classes and utilities used by the
+ container implementation.
+ **/
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java
new file mode 100644
index 0000000..457c417
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ChunkManagerImpl.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkUtils;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.file.Files;
+import java.nio.file.StandardCopyOption;
+import java.security.NoSuchAlgorithmException;
+import java.util.concurrent.ExecutionException;
+
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNSUPPORTED_REQUEST;
+
+/**
+ * An implementation of ChunkManager that is used by default in Ozone.
+ */
+public class ChunkManagerImpl implements ChunkManager {
+  static final Logger LOG =
+      LoggerFactory.getLogger(ChunkManagerImpl.class);
+
+  private final ContainerManager containerManager;
+
+  /**
+   * Constructs a ChunkManager.
+   *
+   * @param manager - ContainerManager.
+   */
+  public ChunkManagerImpl(ContainerManager manager) {
+    this.containerManager = manager;
+  }
+
+  /**
+   * Writes a given chunk.
+   *
+   * @param pipeline - Name and the set of machines that make this container.
+   * @param keyName - Name of the Key.
+   * @param info - ChunkInfo.
+   * @param data - chunk bytes to write.
+   * @param stage - write stage (WRITE_DATA, COMMIT_DATA or COMBINED).
+   * @throws StorageContainerException
+   */
+  @Override
+  public void writeChunk(Pipeline pipeline, String keyName, ChunkInfo info,
+      byte[] data, ContainerProtos.Stage stage)
+      throws StorageContainerException {
+    // we don't want container manager to go away while we are writing chunks.
+    containerManager.readLock();
+
+    // TODO : Take keyManager Write lock here.
+    try {
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      String containerName = pipeline.getContainerName();
+      Preconditions.checkNotNull(containerName,
+          "Container name cannot be null");
+      ContainerData container =
+          containerManager.readContainer(containerName);
+      File chunkFile = ChunkUtils.validateChunk(pipeline, container, info);
+      File tmpChunkFile = getTmpChunkFile(chunkFile, info);
+
+      LOG.debug("writing chunk:{} chunk stage:{} chunk file:{} tmp chunk file:{}",
+          info.getChunkName(), stage, chunkFile, tmpChunkFile);
+      switch (stage) {
+      case WRITE_DATA:
+        ChunkUtils.writeData(tmpChunkFile, info, data);
+        break;
+      case COMMIT_DATA:
+        commitChunk(tmpChunkFile, chunkFile, containerName, info.getLen());
+        break;
+      case COMBINED:
+        // directly write to the chunk file
+        long oldSize = chunkFile.length();
+        ChunkUtils.writeData(chunkFile, info, data);
+        long newSize = chunkFile.length();
+        containerManager.incrBytesUsed(containerName, newSize - oldSize);
+        containerManager.incrWriteCount(containerName);
+        containerManager.incrWriteBytes(containerName, info.getLen());
+        break;
+      default:
+        throw new IOException("Can not identify write operation.");
+      }
+    } catch (ExecutionException | NoSuchAlgorithmException | IOException e) {
+      LOG.error("write data failed.", e);
+      throw new StorageContainerException("Internal error: ", e,
+          CONTAINER_INTERNAL_ERROR);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.error("write data failed.", e);
+      throw new StorageContainerException("Internal error: ", e,
+          CONTAINER_INTERNAL_ERROR);
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  // Create a temporary file in the same container directory
+  // in the format "<chunkname>.tmp"
+  private static File getTmpChunkFile(File chunkFile, ChunkInfo info)
+      throws StorageContainerException {
+    return new File(chunkFile.getParent(),
+        chunkFile.getName() +
+            OzoneConsts.CONTAINER_CHUNK_NAME_DELIMITER +
+            OzoneConsts.CONTAINER_TEMPORARY_CHUNK_PREFIX);
+  }
+
+  // Commit the chunk by renaming the temporary chunk file to chunk file
+  private void commitChunk(File tmpChunkFile, File chunkFile,
+      String containerName, long chunkLen) throws IOException {
+    long sizeDiff = tmpChunkFile.length() - chunkFile.length();
+    // It is safe to replace here as the earlier chunk if existing should be
+    // caught as part of validateChunk
+    Files.move(tmpChunkFile.toPath(), chunkFile.toPath(),
+        StandardCopyOption.REPLACE_EXISTING);
+    containerManager.incrBytesUsed(containerName, sizeDiff);
+    containerManager.incrWriteCount(containerName);
+    containerManager.incrWriteBytes(containerName, chunkLen);
+  }
+
+  /**
+   * Reads the data defined by a chunk.
+   *
+   * @param pipeline - container pipeline.
+   * @param keyName - Name of the Key
+   * @param info - ChunkInfo.
+   * @return byte array
+   * @throws StorageContainerException
+   * TODO: Right now we do not support partial reads and writes of chunks.
+   * TODO: Explore if we need to do that for ozone.
+   */
+  @Override
+  public byte[] readChunk(Pipeline pipeline, String keyName, ChunkInfo info)
+      throws StorageContainerException {
+    containerManager.readLock();
+    try {
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      String containerName = pipeline.getContainerName();
+      Preconditions.checkNotNull(containerName,
+          "Container name cannot be null");
+      ContainerData container =
+          containerManager.readContainer(containerName);
+      File chunkFile = ChunkUtils.getChunkFile(pipeline, container, info);
+      ByteBuffer data =  ChunkUtils.readData(chunkFile, info);
+      containerManager.incrReadCount(containerName);
+      containerManager.incrReadBytes(containerName, chunkFile.length());
+      return data.array();
+    } catch (ExecutionException | NoSuchAlgorithmException e) {
+      LOG.error("read data failed.", e);
+      throw new StorageContainerException("Internal error: ",
+          e, CONTAINER_INTERNAL_ERROR);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.error("read data failed.", e);
+      throw new StorageContainerException("Internal error: ",
+          e, CONTAINER_INTERNAL_ERROR);
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  /**
+   * Deletes a given chunk.
+   *
+   * @param pipeline - Pipeline.
+   * @param keyName - Key Name
+   * @param info - Chunk Info
+   * @throws StorageContainerException
+   */
+  @Override
+  public void deleteChunk(Pipeline pipeline, String keyName, ChunkInfo info)
+      throws StorageContainerException {
+    containerManager.readLock();
+    try {
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      String containerName = pipeline.getContainerName();
+      Preconditions.checkNotNull(containerName,
+          "Container name cannot be null");
+      File chunkFile = ChunkUtils.getChunkFile(pipeline, containerManager
+          .readContainer(containerName), info);
+      if ((info.getOffset() == 0) && (info.getLen() == chunkFile.length())) {
+        // Capture the length before deleting; File#length() returns 0 once
+        // the file is gone, which would make the bytes-used accounting a no-op.
+        long chunkFileLength = chunkFile.length();
+        FileUtil.fullyDelete(chunkFile);
+        containerManager.decrBytesUsed(containerName, chunkFileLength);
+      } else {
+        LOG.error("Not Supported Operation. Trying to delete a " +
+            "chunk that is in shared file. chunk info : " + info.toString());
+        throw new StorageContainerException("Not Supported Operation. " +
+            "Trying to delete a chunk that is in shared file. chunk info : "
+            + info.toString(), UNSUPPORTED_REQUEST);
+      }
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  /**
+   * Shutdown the chunkManager.
+   *
+   * In the chunkManager we haven't acquired any resources, so nothing to do
+   * here. This call is made with containerManager Writelock held.
+   */
+  @Override
+  public void shutdown() {
+    Preconditions.checkState(this.containerManager.hasWriteLock());
+  }
+}
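
To illustrate the stages handled by writeChunk above, here is a hedged sketch
of the two-phase flow. Passing null data in the COMMIT_DATA call is an
assumption based on the fact that the commit branch never reads the data
argument; the pipeline, key name, ChunkInfo and data are taken from the
incoming request.

    // Illustrative fragment: write the bytes to the temporary chunk file
    // first, then commit (rename) it once the write is acknowledged.
    chunkManager.writeChunk(pipeline, keyName, info, data,
        ContainerProtos.Stage.WRITE_DATA);
    // ... later, when the write should become visible ...
    chunkManager.writeChunk(pipeline, keyName, info, null,
        ContainerProtos.Stage.COMMIT_DATA);

    // Small writes can skip the two-phase dance entirely:
    chunkManager.writeChunk(pipeline, keyName, info, data,
        ContainerProtos.Stage.COMBINED);
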
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerLocationManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerLocationManagerImpl.java
new file mode 100644
index 0000000..e0e826c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerLocationManagerImpl.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerLocationManager;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerLocationManagerMXBean;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * A class that tells the ContainerManager where to place the containers.
+ * Please note : There is *no* one-to-one correlation between metadata
+ * Locations and data Locations.
+ *
+ *  For example: a user could map all container metadata files to an
+ *  SSD while leaving the data/chunk files on a bunch of other disks.
+ */
+public class ContainerLocationManagerImpl implements ContainerLocationManager,
+    ContainerLocationManagerMXBean {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerLocationManagerImpl.class);
+
+  private final List<ContainerStorageLocation> dataLocations;
+  private int currentIndex;
+  private final List<StorageLocation> metadataLocations;
+  private final ObjectName jmxbean;
+
+  /**
+   * Constructs a Location Manager.
+   * @param metadataLocations - locations where we store the container
+   * metadata.
+   * @param dataDirs - locations where we store the actual data or
+   * chunk files.
+   * @param conf - configuration.
+   * @throws IOException
+   */
+  public ContainerLocationManagerImpl(List<StorageLocation> metadataLocations,
+      List<StorageLocation> dataDirs, Configuration conf)
+      throws IOException {
+    dataLocations = new LinkedList<>();
+    for (StorageLocation dataDir : dataDirs) {
+      dataLocations.add(new ContainerStorageLocation(dataDir, conf));
+    }
+    this.metadataLocations = metadataLocations;
+    jmxbean = MBeans.register("OzoneDataNode",
+        ContainerLocationManager.class.getSimpleName(), this);
+  }
+
+  /**
+   * Returns the path where the container should be placed from a set of
+   * metadataLocations.
+   *
+   * @return A path where we should place this container and metadata.
+   * @throws IOException
+   */
+  @Override
+  public Path getContainerPath()
+      throws IOException {
+    Preconditions.checkState(metadataLocations.size() > 0);
+    int index = currentIndex % metadataLocations.size();
+    return Paths.get(metadataLocations.get(index).getNormalizedUri());
+  }
+
+  /**
+   * Returns the path where the container data files are stored.
+   *
+   * @return  a path where we place the LevelDB and data files of a container.
+   * @throws IOException
+   */
+  @Override
+  public Path getDataPath(String containerName) throws IOException {
+    Path currentPath = Paths.get(
+        dataLocations.get(currentIndex++ % dataLocations.size())
+            .getNormalizedUri());
+    currentPath = currentPath.resolve(OzoneConsts.CONTAINER_PREFIX);
+    return currentPath.resolve(containerName);
+  }
+
+  @Override
+  public StorageLocationReport[] getLocationReport() throws IOException {
+    StorageLocationReport[] reports =
+        new StorageLocationReport[dataLocations.size()];
+    for (int idx = 0; idx < dataLocations.size(); idx++) {
+      ContainerStorageLocation loc = dataLocations.get(idx);
+      long scmUsed = 0;
+      long remaining = 0;
+      try {
+        scmUsed = loc.getScmUsed();
+        remaining = loc.getAvailable();
+      } catch (IOException ex) {
+        LOG.warn("Failed to get scmUsed and remaining for container " +
+            "storage location {}", loc.getNormalizedUri(), ex);
+        // reset scmUsed and remaining if df/du failed.
+        scmUsed = 0;
+        remaining = 0;
+      }
+
+      // TODO: handle failed storage
+      // For now, include storage report for location that failed to get df/du.
+      StorageLocationReport r = new StorageLocationReport(
+          loc.getStorageUuId(), false, loc.getCapacity(),
+          scmUsed, remaining);
+      reports[idx] = r;
+    }
+    return reports;
+  }
+
+  /**
+   * Supports clean shutdown of container location du threads.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void shutdown() throws IOException {
+    for (ContainerStorageLocation loc: dataLocations) {
+      loc.shutdown();
+    }
+    MBeans.unregister(jmxbean);
+  }
+}
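
A short sketch of how the location manager is consulted when a new container
is created; the container name here is made up for illustration.

    // Illustrative fragment: pick a metadata directory (round-robin over the
    // configured metadata locations) and a data directory for chunk files.
    Path containerMetaDir = locationManager.getContainerPath();
    Path containerDataDir = locationManager.getDataPath("container-0001");

    // Periodically, the per-location reports feed the datanode's node report:
    StorageLocationReport[] reports = locationManager.getLocationReport();
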
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
new file mode 100644
index 0000000..5e7375c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
@@ -0,0 +1,1113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerDeletionChoosingPolicy;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerLocationManager;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerReportManager;
+import org.apache.hadoop.ozone.container.common.interfaces.KeyManager;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.utils.MetadataStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.DigestInputStream;
+import java.security.DigestOutputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentNavigableMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_EXISTS;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_INTERNAL_ERROR;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CONTAINER_NOT_FOUND;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .ERROR_IN_COMPACT_DB;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .INVALID_CONFIG;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .IO_EXCEPTION;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .NO_SUCH_ALGORITHM;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNABLE_TO_READ_METADATA_DB;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNCLOSED_CONTAINER_IO;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .UNSUPPORTED_REQUEST;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_META;
+
+/**
+ * A generic ContainerManagerImpl that is called from the Ozone
+ * ContainerManagerImpl. This allows us to support incremental changes to the
+ * Ozone version without having to rewrite the container manager.
+ */
+public class ContainerManagerImpl implements ContainerManager {
+  static final Logger LOG =
+      LoggerFactory.getLogger(ContainerManagerImpl.class);
+
+  private final ConcurrentSkipListMap<String, ContainerStatus>
+      containerMap = new ConcurrentSkipListMap<>();
+
+  // Use a non-fair RW lock for better throughput, we may revisit this decision
+  // if this causes fairness issues.
+  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+  private ContainerLocationManager locationManager;
+  private ChunkManager chunkManager;
+  private KeyManager keyManager;
+  private Configuration conf;
+  private DatanodeDetails datanodeDetails;
+
+  private ContainerDeletionChoosingPolicy containerDeletionChooser;
+  private ContainerReportManager containerReportManager;
+
+  /**
+   * Init call that sets up a container Manager.
+   *
+   * @param config - Configuration.
+   * @param containerDirs - List of Metadata Container locations.
+   * @param dnDetails - DatanodeDetails.
+   * @throws IOException
+   */
+  @Override
+  public void init(
+      Configuration config, List<StorageLocation> containerDirs,
+      DatanodeDetails dnDetails) throws IOException {
+    Preconditions.checkNotNull(config, "Config must not be null");
+    Preconditions.checkNotNull(containerDirs, "Container directories cannot " +
+        "be null");
+    Preconditions.checkNotNull(dnDetails, "Datanode Details cannot " +
+        "be null");
+
+    Preconditions.checkState(containerDirs.size() > 0, "Number of container" +
+        " directories must be greater than zero.");
+
+    this.conf = config;
+    this.datanodeDetails = dnDetails;
+
+    readLock();
+    try {
+      containerDeletionChooser = ReflectionUtils.newInstance(conf.getClass(
+          ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY,
+          TopNOrderedContainerDeletionChoosingPolicy.class,
+          ContainerDeletionChoosingPolicy.class), conf);
+
+      for (StorageLocation path : containerDirs) {
+        File directory = Paths.get(path.getNormalizedUri()).toFile();
+        if (!directory.exists() && !directory.mkdirs()) {
+          LOG.error("Container metadata directory doesn't exist "
+              + "and cannot be created. Path: {}", path.toString());
+          throw new StorageContainerException("Container metadata "
+              + "directory doesn't exist and cannot be created " + path
+              .toString(), INVALID_CONFIG);
+        }
+
+        // TODO: This will fail if any directory is invalid.
+        // We should fix this to handle invalid directories and continue.
+        // Leaving it this way to fail fast for time being.
+        if (!directory.isDirectory()) {
+          LOG.error("Invalid path to container metadata directory. path: {}",
+              path.toString());
+          throw new StorageContainerException("Invalid path to container " +
+              "metadata directory." + path, INVALID_CONFIG);
+        }
+        LOG.info("Loading containers under {}", path);
+        File[] files = directory.listFiles(new ContainerFilter());
+        if (files != null) {
+          for (File containerFile : files) {
+            LOG.debug("Loading container {}", containerFile);
+            String containerPath =
+                ContainerUtils.getContainerNameFromFile(containerFile);
+            Preconditions.checkNotNull(containerPath, "Container path cannot" +
+                " be null");
+            readContainerInfo(containerPath);
+          }
+        }
+      }
+
+      List<StorageLocation> dataDirs = new LinkedList<>();
+      for (String dir : config.getStrings(DFS_DATANODE_DATA_DIR_KEY)) {
+        StorageLocation location = StorageLocation.parse(dir);
+        dataDirs.add(location);
+      }
+      this.locationManager =
+          new ContainerLocationManagerImpl(containerDirs, dataDirs, config);
+
+      this.containerReportManager =
+          new ContainerReportManagerImpl(config);
+    } finally {
+      readUnlock();
+    }
+  }
+
+  /**
+   * Reads the Container Info from a file and verifies that the checksums
+   * match. If they match, the container is added to containerMap.
+   *
+   * @param containerName - Name which points to the persisted container.
+   * @throws StorageContainerException
+   */
+  private void readContainerInfo(String containerName)
+      throws StorageContainerException {
+    Preconditions.checkState(containerName.length() > 0,
+        "Container name length cannot be zero.");
+    FileInputStream containerStream = null;
+    DigestInputStream dis = null;
+    FileInputStream metaStream = null;
+    Path cPath = Paths.get(containerName).getFileName();
+    String keyName = null;
+    if (cPath != null) {
+      keyName = cPath.toString();
+    }
+    Preconditions.checkNotNull(keyName,
+        "Container Name  to container key mapping is null");
+
+    try {
+      String containerFileName = containerName.concat(CONTAINER_EXTENSION);
+      String metaFileName = containerName.concat(CONTAINER_META);
+
+      containerStream = new FileInputStream(containerFileName);
+
+      metaStream = new FileInputStream(metaFileName);
+
+      MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+
+      dis = new DigestInputStream(containerStream, sha);
+
+      ContainerProtos.ContainerData containerDataProto =
+          ContainerProtos.ContainerData.parseDelimitedFrom(dis);
+      ContainerData containerData;
+      if (containerDataProto == null) {
+        // Sometimes container metadata might have been created but left
+        // empty. When loading the info we then get null, which usually means
+        // the previous run stopped at some intermediate phase before the
+        // metadata was populated. Such containers are marked as inactive.
+        containerMap.put(keyName, new ContainerStatus(null));
+        return;
+      }
+      containerData = ContainerData.getFromProtBuf(containerDataProto, conf);
+      ContainerProtos.ContainerMeta meta =
+          ContainerProtos.ContainerMeta.parseDelimitedFrom(metaStream);
+      if (meta != null && !DigestUtils.sha256Hex(sha.digest())
+          .equals(meta.getHash())) {
+        // This means we were not able to read valid data from the disk when
+        // the datanode booted. We rely on SCM understanding that we don't
+        // have valid data for this container when we send container reports.
+        // Hopefully SCM will ask us to delete this container and rebuild it.
+        LOG.error("Invalid SHA found for container data. Name: {}, "
+            + "cowardly refusing to read invalid data", containerName);
+        containerMap.put(keyName, new ContainerStatus(null));
+        return;
+      }
+
+      ContainerStatus containerStatus = new ContainerStatus(containerData);
+      // Initialize pending deletion blocks count in in-memory
+      // container status.
+      MetadataStore metadata = KeyUtils.getDB(containerData, conf);
+      List<Map.Entry<byte[], byte[]>> underDeletionBlocks = metadata
+          .getSequentialRangeKVs(null, Integer.MAX_VALUE,
+              MetadataKeyFilters.getDeletingKeyFilter());
+      containerStatus.incrPendingDeletionBlocks(underDeletionBlocks.size());
+
+      List<Map.Entry<byte[], byte[]>> liveKeys = metadata
+          .getRangeKVs(null, Integer.MAX_VALUE,
+              MetadataKeyFilters.getNormalKeyFilter());
+
+      // Get container bytesUsed upon loading container
+      // The in-memory state is updated upon key write or delete
+      // TODO: update containerDataProto and persist it into container MetaFile
+      long bytesUsed = liveKeys.parallelStream().mapToLong(e -> {
+        KeyData keyData;
+        try {
+          keyData = KeyUtils.getKeyData(e.getValue());
+          return keyData.getSize();
+        } catch (IOException ex) {
+          return 0L;
+        }
+      }).sum();
+      containerStatus.setBytesUsed(bytesUsed);
+
+      containerMap.put(keyName, containerStatus);
+    } catch (IOException | NoSuchAlgorithmException ex) {
+      LOG.error("read failed for file: {}", containerName, ex);
+
+      // TODO : Add this file to a recovery Queue.
+
+      // Remember that this container is busted and we cannot use it.
+      containerMap.put(keyName, new ContainerStatus(null));
+      throw new StorageContainerException("Unable to read container info",
+          UNABLE_TO_READ_METADATA_DB);
+    } finally {
+      IOUtils.closeStream(dis);
+      IOUtils.closeStream(containerStream);
+      IOUtils.closeStream(metaStream);
+    }
+  }
+
+  /**
+   * Creates a container with the given name.
+   *
+   * @param pipeline -- Nodes which make up this container.
+   * @param containerData - Container Name and metadata.
+   * @throws StorageContainerException - Exception
+   */
+  @Override
+  public void createContainer(Pipeline pipeline, ContainerData containerData)
+      throws StorageContainerException {
+    Preconditions.checkNotNull(containerData, "Container data cannot be null");
+    writeLock();
+    try {
+      if (containerMap.containsKey(containerData.getName())) {
+        LOG.debug("container already exists. {}", containerData.getName());
+        throw new StorageContainerException("container already exists.",
+            CONTAINER_EXISTS);
+      }
+
+      // This is by design. We first write and close the
+      // container Info and metadata to a directory.
+      // Then read back and put that info into the containerMap.
+      // This allows us to make sure that our write is consistent.
+
+      writeContainerInfo(containerData, false);
+      File cFile = new File(containerData.getContainerPath());
+      readContainerInfo(ContainerUtils.getContainerNameFromFile(cFile));
+    } catch (NoSuchAlgorithmException ex) {
+      LOG.error("Internal error: We seem to be running a JVM without a " +
+          "needed hash algorithm.");
+      throw new StorageContainerException("failed to create container",
+          NO_SUCH_ALGORITHM);
+    } finally {
+      writeUnlock();
+    }
+
+  }
+
+  /**
+   * Writes a container to a chosen location and updates the container Map.
+   *
+   * The file formats of ContainerData and Container Meta are the following.
+   *
+   * message ContainerData {
+   * required string name = 1;
+   * repeated KeyValue metadata = 2;
+   * optional string dbPath = 3;
+   * optional string containerPath = 4;
+   * optional int64 bytesUsed = 5;
+   * optional int64 size = 6;
+   * }
+   *
+   * message ContainerMeta {
+   * required string fileName = 1;
+   * required string hash = 2;
+   * }
+   *
+   * @param containerData - container Data
+   * @param overwrite - Whether we are overwriting.
+   * @throws StorageContainerException
+   * @throws NoSuchAlgorithmException
+   */
+  private void writeContainerInfo(ContainerData containerData,
+      boolean  overwrite)
+      throws StorageContainerException, NoSuchAlgorithmException {
+
+    Preconditions.checkNotNull(this.locationManager,
+        "Internal error: location manager cannot be null");
+
+    FileOutputStream containerStream = null;
+    DigestOutputStream dos = null;
+    FileOutputStream metaStream = null;
+
+    try {
+      Path metadataPath = null;
+      Path location = (!overwrite) ? locationManager.getContainerPath():
+          Paths.get(containerData.getContainerPath()).getParent();
+      if (location == null) {
+        throw new StorageContainerException(
+            "Failed to get container file path.",
+            CONTAINER_INTERNAL_ERROR);
+      }
+
+      File containerFile = ContainerUtils.getContainerFile(containerData,
+          location);
+      File metadataFile = ContainerUtils.getMetadataFile(containerData,
+          location);
+      String containerName = containerData.getContainerName();
+
+      if(!overwrite) {
+        ContainerUtils.verifyIsNewContainer(containerFile, metadataFile);
+        metadataPath = this.locationManager.getDataPath(containerName);
+        metadataPath = ContainerUtils.createMetadata(metadataPath,
+            containerName, conf);
+      }  else {
+        metadataPath = ContainerUtils.getMetadataDirectory(containerData);
+      }
+
+      containerStream = new FileOutputStream(containerFile);
+      metaStream = new FileOutputStream(metadataFile);
+      MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+
+      dos = new DigestOutputStream(containerStream, sha);
+      containerData.setDBPath(metadataPath.resolve(
+          ContainerUtils.getContainerDbFileName(containerName))
+          .toString());
+      containerData.setContainerPath(containerFile.toString());
+
+      ContainerProtos.ContainerData protoData = containerData
+          .getProtoBufMessage();
+      protoData.writeDelimitedTo(dos);
+
+      ContainerProtos.ContainerMeta protoMeta = ContainerProtos
+          .ContainerMeta.newBuilder()
+          .setFileName(containerFile.toString())
+          .setHash(DigestUtils.sha256Hex(sha.digest()))
+          .build();
+      protoMeta.writeDelimitedTo(metaStream);
+
+    } catch (IOException ex) {
+      // TODO : we need to clean up partially constructed files.
+      // The proper way to do this would be for a thread to read all
+      // three artifacts and make sure they are sane. That info needs
+      // to come from the replication pipeline, and if they are not
+      // consistent these files should be deleted.
+
+      // In case of ozone this is *not* a deal breaker since
+      // SCM is guaranteed to generate unique container names.
+      // The saving grace is that we check if we have residue files
+      // lying around when creating a new container. We need to queue
+      // this information to a cleaner thread.
+
+      LOG.error("Creation of container failed. Name: {}, we might need to " +
+              "cleanup partially created artifacts. ",
+          containerData.getContainerName(), ex);
+      throw new StorageContainerException("Container creation failed. ",
+          ex, CONTAINER_INTERNAL_ERROR);
+    } finally {
+      IOUtils.closeStream(dos);
+      IOUtils.closeStream(containerStream);
+      IOUtils.closeStream(metaStream);
+    }
+  }
+
+  /**
+   * Deletes an existing container.
+   *
+   * @param pipeline - nodes that make this container.
+   * @param containerName - name of the container.
+   * @param forceDelete - whether this container should be deleted forcibly.
+   * @throws StorageContainerException
+   */
+  @Override
+  public void deleteContainer(Pipeline pipeline, String containerName,
+      boolean forceDelete) throws StorageContainerException {
+    Preconditions.checkNotNull(containerName, "Container name cannot be null");
+    Preconditions.checkState(containerName.length() > 0,
+        "Container name length cannot be zero.");
+    writeLock();
+    try {
+      if (isOpen(pipeline.getContainerName())) {
+        throw new StorageContainerException(
+            "Deleting an open container is not allowed.",
+            UNCLOSED_CONTAINER_IO);
+      }
+
+      ContainerStatus status = containerMap.get(containerName);
+      if (status == null) {
+        LOG.debug("No such container. Name: {}", containerName);
+        throw new StorageContainerException("No such container. Name : " +
+            containerName, CONTAINER_NOT_FOUND);
+      }
+      if (status.getContainer() == null) {
+        LOG.debug("Invalid container data. Name: {}", containerName);
+        throw new StorageContainerException("Invalid container data. Name : " +
+            containerName, CONTAINER_NOT_FOUND);
+      }
+      ContainerUtils.removeContainer(status.getContainer(), conf, forceDelete);
+      containerMap.remove(containerName);
+    } catch (StorageContainerException e) {
+      throw e;
+    } catch (IOException e) {
+      // TODO : An I/O error during delete can leave partial artifacts on the
+      // disk. We will need the cleaner thread to cleanup this information.
+      LOG.error("Failed to cleanup container. Name: {}", containerName, e);
+      throw new StorageContainerException(containerName, e, IO_EXCEPTION);
+    } finally {
+      writeUnlock();
+    }
+  }
+
+  /**
+   * A simple interface for container iterations.
+   * <p/>
+   * This call makes no guarantees about consistency of the data between
+   * different list calls. It just returns the best known data at that point
+   * in time. It is possible that using this iteration you can miss certain
+   * containers from the listing.
+   *
+   * @param prefix -  Return keys that match this prefix.
+   * @param count - how many to return
+   * @param prevKey - Previous Key Value or empty String.
+   * @param data - Actual containerData
+   * @throws StorageContainerException
+   */
+  @Override
+  public void listContainer(String prefix, long count, String prevKey,
+      List<ContainerData> data) throws StorageContainerException {
+    // TODO : Support list with Prefix and PrevKey
+    Preconditions.checkNotNull(data,
+        "Internal assertion: data cannot be null");
+    readLock();
+    try {
+      ConcurrentNavigableMap<String, ContainerStatus> map;
+      if (prevKey == null || prevKey.isEmpty()) {
+        map = containerMap.tailMap(containerMap.firstKey(), true);
+      } else {
+        map = containerMap.tailMap(prevKey, false);
+      }
+
+      int currentCount = 0;
+      for (ContainerStatus entry : map.values()) {
+        if (currentCount < count) {
+          data.add(entry.getContainer());
+          currentCount++;
+        } else {
+          return;
+        }
+      }
+    } finally {
+      readUnlock();
+    }
+  }
+
+  /**
+   * Get metadata about a specific container.
+   *
+   * @param containerName - Name of the container
+   * @return ContainerData - Container Data.
+   * @throws StorageContainerException
+   */
+  @Override
+  public ContainerData readContainer(String containerName) throws
+      StorageContainerException {
+    Preconditions.checkNotNull(containerName, "Container name cannot be null");
+    Preconditions.checkState(containerName.length() > 0,
+        "Container name length cannot be zero.");
+    if (!containerMap.containsKey(containerName)) {
+      throw new StorageContainerException("Unable to find the container. Name: "
+          + containerName, CONTAINER_NOT_FOUND);
+    }
+    ContainerData cData = containerMap.get(containerName).getContainer();
+    if (cData == null) {
+      throw new StorageContainerException("Invalid container data. Name: "
+          + containerName, CONTAINER_INTERNAL_ERROR);
+    }
+    return cData;
+  }
+
+  /**
+   * Closes an open container. If it is already closed or does not exist, a
+   * StorageContainerException is thrown.
+   *
+   * @param containerName - Name of the container.
+   * @throws StorageContainerException
+   */
+  @Override
+  public void closeContainer(String containerName)
+      throws StorageContainerException, NoSuchAlgorithmException {
+    ContainerData containerData = readContainer(containerName);
+    containerData.closeContainer();
+    writeContainerInfo(containerData, true);
+    MetadataStore db = KeyUtils.getDB(containerData, conf);
+
+    // It is ok if this operation takes a bit of time.
+    // Close container is not expected to be instantaneous.
+    try {
+      db.compactDB();
+    } catch (IOException e) {
+      LOG.error("Error in DB compaction while closing container", e);
+      throw new StorageContainerException(e, ERROR_IN_COMPACT_DB);
+    }
+
+    // Active is different from closed. Closed means the container is
+    // immutable; active == false means some internal error has happened to
+    // this container. This is a way to track damaged containers: if we have
+    // an I/O failure, this allows us to take quick action in case of
+    // container issues.
+
+    ContainerStatus status = new ContainerStatus(containerData);
+    containerMap.put(containerName, status);
+  }
+
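+  /**
+   * Updates an existing container's metadata. The current container file is
+   * backed up before being rewritten so it can be restored if the update
+   * fails.
+   *
+   * @param pipeline - nodes that make this container.
+   * @param containerName - name of the container.
+   * @param data - new container data.
+   * @param forceUpdate - if true, the update is applied even to a closed
+   *                      container and the on-disk file checks and rewrite
+   *                      are skipped.
+   * @throws StorageContainerException
+   */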
+  @Override
+  public void updateContainer(Pipeline pipeline, String containerName,
+      ContainerData data, boolean forceUpdate)
+      throws StorageContainerException {
+    Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+    Preconditions.checkNotNull(containerName, "Container name cannot be null");
+    Preconditions.checkNotNull(data, "Container data cannot be null");
+    FileOutputStream containerStream = null;
+    DigestOutputStream dos = null;
+    MessageDigest sha = null;
+    File containerFileBK = null, containerFile = null;
+    boolean deleted = false;
+
+    if(!containerMap.containsKey(containerName)) {
+      throw new StorageContainerException("Container doesn't exist. Name :"
+          + containerName, CONTAINER_NOT_FOUND);
+    }
+
+    try {
+      sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+    } catch (NoSuchAlgorithmException e) {
+      throw new StorageContainerException("Unable to create Message Digest,"
+          + " usually this is a java configuration issue.",
+          NO_SUCH_ALGORITHM);
+    }
+
+    try {
+      Path location = locationManager.getContainerPath();
+      ContainerData orgData = containerMap.get(containerName).getContainer();
+      if (orgData == null) {
+        // updating an invalid container
+        throw new StorageContainerException("Cannot update a container with " +
+            "invalid container metadata.", CONTAINER_INTERNAL_ERROR);
+      }
+
+      if (!forceUpdate && !orgData.isOpen()) {
+        throw new StorageContainerException(
+            "Updating a closed container is not allowed. Name: " + containerName,
+            UNSUPPORTED_REQUEST);
+      }
+
+      containerFile = ContainerUtils.getContainerFile(orgData, location);
+      // If forceUpdate is true, there is no need to check
+      // whether the container file exists.
+      if (!forceUpdate) {
+        if (!containerFile.exists() || !containerFile.canWrite()) {
+          throw new StorageContainerException(
+              "Container file does not exist or is not writable. Name: " + containerName,
+              CONTAINER_INTERNAL_ERROR);
+        }
+
+        // Backup the container file
+        containerFileBK = File.createTempFile(
+            "tmp_" + System.currentTimeMillis() + "_",
+            containerFile.getName(), containerFile.getParentFile());
+        FileUtils.copyFile(containerFile, containerFileBK);
+
+        deleted = containerFile.delete();
+        containerStream = new FileOutputStream(containerFile);
+        dos = new DigestOutputStream(containerStream, sha);
+
+        ContainerProtos.ContainerData protoData = data.getProtoBufMessage();
+        protoData.writeDelimitedTo(dos);
+      }
+
+      // Update the in-memory map
+      ContainerStatus newStatus = new ContainerStatus(data);
+      containerMap.replace(containerName, newStatus);
+    } catch (IOException e) {
+      // Restore the container file from backup
+      if(containerFileBK != null && containerFileBK.exists() && deleted) {
+        if(containerFile.delete()
+            && containerFileBK.renameTo(containerFile)) {
+          throw new StorageContainerException("Container update failed,"
+              + " container data restored from the backup.",
+              CONTAINER_INTERNAL_ERROR);
+        } else {
+          throw new StorageContainerException(
+              "Failed to restore container data from the backup. Name: "
+                  + containerName, CONTAINER_INTERNAL_ERROR);
+        }
+      } else {
+        throw new StorageContainerException(
+            e.getMessage(), CONTAINER_INTERNAL_ERROR);
+      }
+    } finally {
+      if (containerFileBK != null && containerFileBK.exists()) {
+        if(!containerFileBK.delete()) {
+          LOG.warn("Unable to delete container file backup : {}.",
+              containerFileBK.getAbsolutePath());
+        }
+      }
+      IOUtils.closeStream(dos);
+      IOUtils.closeStream(containerStream);
+    }
+  }
+
+  @VisibleForTesting
+  protected File getContainerFile(ContainerData data) throws IOException {
+    return ContainerUtils.getContainerFile(data,
+        this.locationManager.getContainerPath());
+  }
+
+  /**
+   * Checks whether a container is open.
+   *
+   * @param containerName - Name of the container.
+   * @return true if the container is open, false otherwise.
+   * @throws StorageContainerException - Throws Exception if we are not able to
+   *                                   find the container.
+   */
+  @Override
+  public boolean isOpen(String containerName) throws StorageContainerException {
+    final ContainerStatus status = containerMap.get(containerName);
+    if (status == null) {
+      throw new StorageContainerException(
+          "Container status not found: " + containerName, CONTAINER_NOT_FOUND);
+    }
+    final ContainerData cData = status.getContainer();
+    if (cData == null) {
+      throw new StorageContainerException(
+          "Container not found: " + containerName, CONTAINER_NOT_FOUND);
+    }
+    return cData.isOpen();
+  }
+
+  /**
+   * Supports clean shutdown of container.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void shutdown() throws IOException {
+    Preconditions.checkState(this.hasWriteLock(),
+        "Assumption that we are holding the lock violated.");
+    this.containerMap.clear();
+    this.locationManager.shutdown();
+  }
+
+
+  @VisibleForTesting
+  public ConcurrentSkipListMap<String, ContainerStatus> getContainerMap() {
+    return containerMap;
+  }
+
+  /**
+   * Acquire read lock.
+   */
+  @Override
+  public void readLock() {
+    this.lock.readLock().lock();
+  }
+
+  @Override
+  public void readLockInterruptibly() throws InterruptedException {
+    this.lock.readLock().lockInterruptibly();
+  }
+
+  /**
+   * Release read lock.
+   */
+  @Override
+  public void readUnlock() {
+    this.lock.readLock().unlock();
+  }
+
+  /**
+   * Check if the current thread holds read lock.
+   */
+  @Override
+  public boolean hasReadLock() {
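+    // ReentrantReadWriteLock.ReadLock has no isHeldByCurrentThread(), so
+    // tryLock() is used here as an approximation; note that it acquires the
+    // read lock when it succeeds.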
+    return this.lock.readLock().tryLock();
+  }
+
+  /**
+   * Acquire write lock.
+   */
+  @Override
+  public void writeLock() {
+    this.lock.writeLock().lock();
+  }
+
+  /**
+   * Acquire write lock, unless interrupted while waiting.
+   */
+  @Override
+  public void writeLockInterruptibly() throws InterruptedException {
+    this.lock.writeLock().lockInterruptibly();
+  }
+
+  /**
+   * Release write lock.
+   */
+  @Override
+  public void writeUnlock() {
+    this.lock.writeLock().unlock();
+  }
+
+  /**
+   * Check if the current thread holds write lock.
+   */
+  @Override
+  public boolean hasWriteLock() {
+    return this.lock.writeLock().isHeldByCurrentThread();
+  }
+
+  public ChunkManager getChunkManager() {
+    return this.chunkManager;
+  }
+
+  /**
+   * Sets the chunk Manager.
+   *
+   * @param chunkManager - Chunk Manager
+   */
+  public void setChunkManager(ChunkManager chunkManager) {
+    this.chunkManager = chunkManager;
+  }
+
+  /**
+   * Gets the Key Manager.
+   *
+   * @return KeyManager.
+   */
+  @Override
+  public KeyManager getKeyManager() {
+    return this.keyManager;
+  }
+
+  /**
+   * Get the node report.
+   * @return node report.
+   */
+  @Override
+  public SCMNodeReport getNodeReport() throws IOException {
+    StorageLocationReport[] reports = locationManager.getLocationReport();
+    SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+    for (int i = 0; i < reports.length; i++) {
+      SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+      nrb.addStorageReport(i, srb.setStorageUuid(reports[i].getId())
+          .setCapacity(reports[i].getCapacity())
+          .setScmUsed(reports[i].getScmUsed())
+          .setRemaining(reports[i].getRemaining())
+          .build());
+    }
+    return nrb.build();
+  }
+
+
+  /**
+   * Gets container reports.
+   *
+   * @return List of all closed containers.
+   * @throws IOException
+   */
+  @Override
+  public List<ContainerData> getContainerReports() throws IOException {
+    LOG.debug("Starting container report iteration.");
+    // No need for locking since containerMap is a ConcurrentSkipListMap,
+    // and we can never get the exact state anyway since a close might
+    // happen after we have iterated past a point.
+    return containerMap.entrySet().stream()
+        .filter(containerStatus ->
+            !containerStatus.getValue().getContainer().isOpen())
+        .map(containerStatus -> containerStatus.getValue().getContainer())
+        .collect(Collectors.toList());
+  }
+
+  /**
+   * Get container report.
+   *
+   * @return The container report.
+   * @throws IOException
+   */
+  @Override
+  public ContainerReportsRequestProto getContainerReport() throws IOException {
+    LOG.debug("Starting container report iteration.");
+    // No need for locking since containerMap is a ConcurrentSkipListMap,
+    // and we can never get the exact state anyway since a close might
+    // happen after we have iterated past a point.
+    List<ContainerStatus> containers = containerMap.values().stream()
+        .collect(Collectors.toList());
+
+    ContainerReportsRequestProto.Builder crBuilder =
+        ContainerReportsRequestProto.newBuilder();
+
+    // TODO: support delta based container report
+    crBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage())
+        .setType(ContainerReportsRequestProto.reportType.fullReport);
+
+    for (ContainerStatus container: containers) {
+      StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder =
+          StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder();
+      ciBuilder.setContainerName(container.getContainer().getContainerName())
+          .setSize(container.getContainer().getMaxSize())
+          .setUsed(container.getContainer().getBytesUsed())
+          .setKeyCount(container.getContainer().getKeyCount())
+          .setReadCount(container.getReadCount())
+          .setWriteCount(container.getWriteCount())
+          .setReadBytes(container.getReadBytes())
+          .setWriteBytes(container.getWriteBytes())
+          .setContainerID(container.getContainer().getContainerID());
+
+      if (container.getContainer().getHash() != null) {
+        ciBuilder.setFinalhash(container.getContainer().getHash());
+      }
+      crBuilder.addReports(ciBuilder.build());
+    }
+
+    return crBuilder.build();
+  }
+
+  /**
+   * Sets the Key Manager.
+   *
+   * @param keyManager - Key Manager.
+   */
+  @Override
+  public void setKeyManager(KeyManager keyManager) {
+    this.keyManager = keyManager;
+  }
+
+  /**
+   * Filter out only container files from the container metadata dir.
+   */
+  private static class ContainerFilter implements FilenameFilter {
+    /**
+     * Tests if a specified file should be included in a file list.
+     *
+     * @param dir the directory in which the file was found.
+     * @param name the name of the file.
+     * @return <code>true</code> if and only if the name should be included in
+     * the file list; <code>false</code> otherwise.
+     */
+    @Override
+    public boolean accept(File dir, String name) {
+      return name.endsWith(CONTAINER_EXTENSION);
+    }
+  }
+
+  @Override
+  public List<ContainerData> chooseContainerForBlockDeletion(
+      int count) throws StorageContainerException {
+    readLock();
+    try {
+      return containerDeletionChooser.chooseContainerForBlockDeletion(
+          count, containerMap);
+    } finally {
+      readUnlock();
+    }
+  }
+
+  @VisibleForTesting
+  public ContainerDeletionChoosingPolicy getContainerDeletionChooser() {
+    return containerDeletionChooser;
+  }
+
+  @Override
+  public void incrPendingDeletionBlocks(int numBlocks, String containerId) {
+    writeLock();
+    try {
+      ContainerStatus status = containerMap.get(containerId);
+      status.incrPendingDeletionBlocks(numBlocks);
+    } finally {
+      writeUnlock();
+    }
+  }
+
+  @Override
+  public void decrPendingDeletionBlocks(int numBlocks, String containerId) {
+    writeLock();
+    try {
+      ContainerStatus status = containerMap.get(containerId);
+      status.decrPendingDeletionBlocks(numBlocks);
+    } finally {
+      writeUnlock();
+    }
+  }
+
+  /**
+   * Increase the read count of the container.
+   *
+   * @param containerName - Name of the container.
+   */
+  @Override
+  public void incrReadCount(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    status.incrReadCount();
+  }
+
+  public long getReadCount(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.getReadCount();
+  }
+
+  /**
+   * Increase the read counter for the bytes read from the container.
+   *
+   * @param containerName - Name of the container.
+   * @param readBytes     - bytes read from the container.
+   */
+  @Override
+  public void incrReadBytes(String containerName, long readBytes) {
+    ContainerStatus status = containerMap.get(containerName);
+    status.incrReadBytes(readBytes);
+  }
+
+  public long getReadBytes(String containerName) {
+    readLock();
+    try {
+      ContainerStatus status = containerMap.get(containerName);
+      return status.getReadBytes();
+    } finally {
+      readUnlock();
+    }
+  }
+
+  /**
+   * Increase the write count of the container.
+   *
+   * @param containerName - Name of the container.
+   */
+  @Override
+  public void incrWriteCount(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    status.incrWriteCount();
+  }
+
+  public long getWriteCount(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.getWriteCount();
+  }
+
+  /**
+   * Increase the write counter for the bytes written into the container.
+   *
+   * @param containerName - Name of the container.
+   * @param writeBytes    - bytes written into the container.
+   */
+  @Override
+  public void incrWriteBytes(String containerName, long writeBytes) {
+    ContainerStatus status = containerMap.get(containerName);
+    status.incrWriteBytes(writeBytes);
+  }
+
+  public long getWriteBytes(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.getWriteBytes();
+  }
+
+  /**
+   * Increase the bytes used by the container.
+   *
+   * @param containerName - Name of the container.
+   * @param used          - additional bytes used by the container.
+   * @return the current bytes used.
+   */
+  @Override
+  public long incrBytesUsed(String containerName, long used) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.incrBytesUsed(used);
+  }
+
+  /**
+   * Decrease the bytes used by the container.
+   *
+   * @param containerName - Name of the container.
+   * @param used          - additional bytes reclaimed by the container.
+   * @return the current bytes used.
+   */
+  @Override
+  public long decrBytesUsed(String containerName, long used) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.decrBytesUsed(used);
+  }
+
+  public long getBytesUsed(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.getBytesUsed();
+  }
+
+  /**
+   * Get the number of keys in the container.
+   *
+   * @param containerName - Name of the container.
+   * @return the current key count.
+   */
+  @Override
+  public long getNumKeys(String containerName) {
+    ContainerStatus status = containerMap.get(containerName);
+    return status.getNumKeys();
+  }
+
+  /**
+   * Get the container report state to send via heartbeat to the SCM.
+   *
+   * @return container report state.
+   */
+  @Override
+  public ReportState getContainerReportState() {
+    return containerReportManager.getContainerReportState();
+  }
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java
new file mode 100644
index 0000000..6c83c66
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerReportManagerImpl.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import org.apache.commons.lang3.RandomUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerReportManager;
+import org.apache.hadoop.util.Time;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval;
+
+/**
+ * Class that wraps the container report operations on the datanode.
+ * // TODO: support incremental/delta container report
+ */
+public class ContainerReportManagerImpl implements ContainerReportManager {
+  // Last non-empty container report time
+  private long lastContainerReportTime;
+  private final long containerReportInterval;
+  private final long heartbeatInterval;
+  private AtomicLong reportCount;
+  private static final ReportState NO_CONTAINER_REPORTSTATE =
+      ReportState.newBuilder()
+          .setState(ReportState.states.noContainerReports)
+          .setCount(0).build();
+
+  public ContainerReportManagerImpl(Configuration config) {
+    this.lastContainerReportTime = -1;
+    this.reportCount = new AtomicLong(0L);
+    this.containerReportInterval = config.getTimeDuration(
+        OzoneConfigKeys.OZONE_CONTAINER_REPORT_INTERVAL,
+        OzoneConfigKeys.OZONE_CONTAINER_REPORT_INTERVAL_DEFAULT,
+        TimeUnit.MILLISECONDS);
+    this.heartbeatInterval = getScmHeartbeatInterval(config);
+  }
+
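+  // Illustrative behavior (values from the comment below): with a 60s report
+  // interval and a 0~30s random delay, a full report is produced at most once
+  // every 60-90s; otherwise the no-report state is returned.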
+  public ReportState getContainerReportState() {
+    if (lastContainerReportTime < 0) {
+      return getFullContainerReportState();
+    } else {
+      // Add a random delay (0~30s) on top of the container report
+      // interval (60s) so that the SCM is not overwhelmed by container
+      // reports sent in sync.
+      if (Time.monotonicNow() - lastContainerReportTime >
+          (containerReportInterval + getRandomReportDelay())) {
+        return getFullContainerReportState();
+      } else {
+        return getNoContainerReportState();
+      }
+    }
+  }
+
+  private ReportState getFullContainerReportState() {
+    ReportState.Builder rsBuilder = ReportState.newBuilder();
+    rsBuilder.setState(ReportState.states.completeContinerReport);
+    rsBuilder.setCount(reportCount.incrementAndGet());
+    this.lastContainerReportTime = Time.monotonicNow();
+    return rsBuilder.build();
+  }
+
+  private ReportState getNoContainerReportState() {
+    return NO_CONTAINER_REPORTSTATE;
+  }
+
+  private long getRandomReportDelay() {
+    return RandomUtils.nextLong(0, heartbeatInterval);
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java
new file mode 100644
index 0000000..5577323
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStatus.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * This class represents the state of a container. If reading the container
+ * fails at boot time, that information is posted to a recovery queue and the
+ * entry is kept in the containerMap.
+ * <p/>
+ * If and when the issue is fixed, the expectation is that the recovery thread
+ * deletes this entry from the containerMap and inserts a new entry instead of
+ * modifying this one.
+ */
+public class ContainerStatus {
+  private final ContainerData containerData;
+
+  /**
+   * Number of pending deletion blocks in container.
+   */
+  private int numPendingDeletionBlocks;
+
+  private AtomicLong readBytes;
+
+  private AtomicLong writeBytes;
+
+  private AtomicLong readCount;
+
+  private AtomicLong writeCount;
+
+  /**
+   * Creates a Container Status class.
+   *
+   * @param containerData - ContainerData.
+   */
+  ContainerStatus(ContainerData containerData) {
+    this.numPendingDeletionBlocks = 0;
+    this.containerData = containerData;
+    this.readCount = new AtomicLong(0L);
+    this.readBytes =  new AtomicLong(0L);
+    this.writeCount =  new AtomicLong(0L);
+    this.writeBytes =  new AtomicLong(0L);
+  }
+
+  /**
+   * Returns container if it is active. It is not active if we have had an
+   * error and we are waiting for the background threads to fix the issue.
+   *
+   * @return ContainerData.
+   */
+  public ContainerData getContainer() {
+    return containerData;
+  }
+
+  /**
+   * Increase the count of pending deletion blocks.
+   *
+   * @param numBlocks increment number
+   */
+  public void incrPendingDeletionBlocks(int numBlocks) {
+    this.numPendingDeletionBlocks += numBlocks;
+  }
+
+  /**
+   * Decrease the count of pending deletion blocks.
+   *
+   * @param numBlocks decrement number
+   */
+  public void decrPendingDeletionBlocks(int numBlocks) {
+    this.numPendingDeletionBlocks -= numBlocks;
+  }
+
+  /**
+   * Get the number of pending deletion blocks.
+   */
+  public int getNumPendingDeletionBlocks() {
+    return this.numPendingDeletionBlocks;
+  }
+
+  /**
+   * Get the number of bytes read from the container.
+   * @return the number of bytes read from the container.
+   */
+  public long getReadBytes() {
+    return readBytes.get();
+  }
+
+  /**
+   * Increase the number of bytes read from the container.
+   * @param bytes number of bytes read.
+   */
+  public void incrReadBytes(long bytes) {
+    this.readBytes.addAndGet(bytes);
+  }
+
+  /**
+   * Get the number of times the container is read.
+   * @return the number of times the container is read.
+   */
+  public long getReadCount() {
+    return readCount.get();
+  }
+
+  /**
+   * Increase the container read count by 1.
+   */
+  public void incrReadCount() {
+    this.readCount.incrementAndGet();
+  }
+
+  /**
+   * Get the number of bytes written into the container.
+   * @return the number of bytes written into the container.
+   */
+  public long getWriteBytes() {
+    return writeBytes.get();
+  }
+
+  /**
+   * Increase the number of bytes written into the container.
+   * @param bytes the number of bytes written into the container.
+   */
+  public void incrWriteBytes(long bytes) {
+    this.writeBytes.addAndGet(bytes);
+  }
+
+  /**
+   * Get the number of writes into the container.
+   * @return the number of writes into the container.
+   */
+  public long getWriteCount() {
+    return writeCount.get();
+  }
+
+  /**
+   * Increase the number of writes into the container by 1.
+   */
+  public void incrWriteCount() {
+    this.writeCount.incrementAndGet();
+  }
+
+  /**
+   * Get the number of bytes used by the container.
+   * @return the number of bytes used by the container.
+   */
+  public long getBytesUsed() {
+    return containerData.getBytesUsed();
+  }
+
+  /**
+   * Increase the number of bytes used by the container.
+   * @param used number of bytes used by the container.
+   * @return the current number of bytes used by the container after the increase.
+   */
+  public long incrBytesUsed(long used) {
+    return containerData.addBytesUsed(used);
+  }
+
+  /**
+   * Set the number of bytes used by the container.
+   * @param used the number of bytes used by the container.
+   */
+  public void setBytesUsed(long used) {
+    containerData.setBytesUsed(used);
+  }
+
+  /**
+   * Decrease the number of bytes used by the container.
+   * @param reclaimed the number of bytes reclaimed from the container.
+   * @return the current number of bytes used by the container after decrease.
+   */
+  public long decrBytesUsed(long reclaimed) {
+    return this.containerData.addBytesUsed(-1L * reclaimed);
+  }
+
+  /**
+   * Get the maximum container size.
+   * @return the maximum container size.
+   */
+  public long getMaxSize() {
+    return containerData.getMaxSize();
+  }
+
+  /**
+   * Set the maximum container size.
+   * @param size the maximum container size.
+   */
+  public void setMaxSize(long size) {
+    this.containerData.setMaxSize(size);
+  }
+
+  /**
+   * Get the number of keys in the container.
+   * @return the number of keys in the container.
+   */
+  public long getNumKeys() {
+    return containerData.getKeyCount();
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java
new file mode 100644
index 0000000..7293895
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerStorageLocation.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CachingGetSpaceUsed;
+import org.apache.hadoop.fs.DF;
+import org.apache.hadoop.fs.GetSpaceUsed;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.net.URI;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
+import java.util.Scanner;
+
+import static org.apache.hadoop.util.RunJar.SHUTDOWN_HOOK_PRIORITY;
+
+/**
+ * Class that wraps the space usage of the Datanode Container Storage Location
+ * used by SCM containers.
+ */
+public class ContainerStorageLocation {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerStorageLocation.class);
+
+  private static final String DU_CACHE_FILE = "scmUsed";
+  private volatile boolean scmUsedSaved = false;
+
+  private final StorageLocation dataLocation;
+  private final String storageUuId;
+  private final DF usage;
+  private final GetSpaceUsed scmUsage;
+  private final File scmUsedFile;
+
+  public ContainerStorageLocation(StorageLocation dataLoc, Configuration conf)
+      throws IOException {
+    this.dataLocation = dataLoc;
+    this.storageUuId = DatanodeStorage.generateUuid();
+    File dataDir = Paths.get(dataLoc.getNormalizedUri()).resolve(
+        OzoneConsts.CONTAINER_PREFIX).toFile();
+    // Initialize container data root if it does not exist as required by DF/DU
+    if (!dataDir.exists()) {
+      if (!dataDir.mkdirs()) {
+        LOG.error("Unable to create the container storage location at : {}",
+            dataDir);
+        throw new IllegalArgumentException("Unable to create the container" +
+            " storage location at : " + dataDir);
+      }
+    }
+    scmUsedFile = new File(dataDir, DU_CACHE_FILE);
+    // get overall disk usage
+    this.usage = new DF(dataDir, conf);
+    // get SCM specific usage
+    this.scmUsage = new CachingGetSpaceUsed.Builder().setPath(dataDir)
+        .setConf(conf)
+        .setInitialUsed(loadScmUsed())
+        .build();
+
+    // Ensure scm usage is saved during shutdown.
+    ShutdownHookManager.get().addShutdownHook(
+        new Runnable() {
+          @Override
+          public void run() {
+            if (!scmUsedSaved) {
+              saveScmUsed();
+            }
+          }
+        }, SHUTDOWN_HOOK_PRIORITY);
+  }
+
+  public URI getNormalizedUri() {
+    return dataLocation.getNormalizedUri();
+  }
+
+  public String getStorageUuId() {
+    return storageUuId;
+  }
+  public long getCapacity() {
+    long capacity = usage.getCapacity();
+    return (capacity > 0) ? capacity : 0;
+  }
+
+  public long getAvailable() throws IOException {
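+    // Available space is capped by both what SCM has not yet consumed
+    // (capacity - scmUsed) and what the underlying volume actually reports
+    // as free.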
+    long remaining = getCapacity() - getScmUsed();
+    long available = usage.getAvailable();
+    if (remaining > available) {
+      remaining = available;
+    }
+    return (remaining > 0) ? remaining : 0;
+  }
+
+  public long getScmUsed() throws IOException{
+    return scmUsage.getUsed();
+  }
+
+  public void shutdown() {
+    saveScmUsed();
+    scmUsedSaved = true;
+
+    if (scmUsage instanceof CachingGetSpaceUsed) {
+      IOUtils.cleanupWithLogger(null, ((CachingGetSpaceUsed) scmUsage));
+    }
+  }
+
+  /**
+   * Read in the cached DU value and return it if it is less than 600 seconds
+   * old (DU update interval). Slight imprecision of scmUsed is not critical
+   * and skipping DU can significantly shorten the startup time.
+   * If the cached value is not available or too old, -1 is returned.
+   */
+  long loadScmUsed() {
+    long cachedScmUsed;
+    long mtime;
+    Scanner sc;
+
+    try {
+      sc = new Scanner(scmUsedFile, "UTF-8");
+    } catch (FileNotFoundException fnfe) {
+      return -1;
+    }
+
+    try {
+      // Get the recorded scmUsed from the file.
+      if (sc.hasNextLong()) {
+        cachedScmUsed = sc.nextLong();
+      } else {
+        return -1;
+      }
+      // Get the recorded mtime from the file.
+      if (sc.hasNextLong()) {
+        mtime = sc.nextLong();
+      } else {
+        return -1;
+      }
+
+      // Return the cached value if mtime is okay.
+      if (mtime > 0 && (Time.now() - mtime < 600000L)) {
+        LOG.info("Cached ScmUsed found for {} : {} ", dataLocation,
+            cachedScmUsed);
+        return cachedScmUsed;
+      }
+      return -1;
+    } finally {
+      sc.close();
+    }
+  }
+
+  /**
+   * Write the current scmUsed to the cache file.
+   */
+  void saveScmUsed() {
+    if (scmUsedFile.exists() && !scmUsedFile.delete()) {
+      LOG.warn("Failed to delete old scmUsed file in {}.", dataLocation);
+    }
+    OutputStreamWriter out = null;
+    try {
+      long used = getScmUsed();
+      if (used > 0) {
+        out = new OutputStreamWriter(new FileOutputStream(scmUsedFile),
+            StandardCharsets.UTF_8);
+        // mtime is written last, so that truncated writes won't be valid.
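+        // File contents are "<scmUsedBytes> <mtimeMillis>",
+        // e.g. "1048576 1524000000000" (illustrative values).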
+        out.write(Long.toString(used) + " " + Long.toString(Time.now()));
+        out.flush();
+        out.close();
+        out = null;
+      }
+    } catch (IOException ioe) {
+      // If write failed, the volume might be bad. Since the cache file is
+      // not critical, log the error and continue.
+      LOG.warn("Failed to write scmUsed to " + scmUsedFile, ioe);
+    } finally {
+      IOUtils.cleanupWithLogger(null, out);
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java
new file mode 100644
index 0000000..d319565
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/Dispatcher.java
@@ -0,0 +1,713 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkUtils;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerMetrics;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.FileUtils;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .CLOSED_CONTAINER_IO;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .GET_SMALL_FILE_ERROR;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .NO_SUCH_ALGORITHM;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .PUT_SMALL_FILE_ERROR;
+
+/**
+ * Ozone Container dispatcher takes a call from the netty server and routes it
+ * to the right handler function.
+ */
+public class Dispatcher implements ContainerDispatcher {
+  static final Logger LOG = LoggerFactory.getLogger(Dispatcher.class);
+
+  private final ContainerManager containerManager;
+  private ContainerMetrics metrics;
+  private Configuration conf;
+
+  /**
+   * Constructs a Dispatcher that receives calls from
+   * XceiverServerHandler.
+   *
+   * @param containerManager - A class that manages containers.
+   * @param config - Configuration.
+   */
+  public Dispatcher(ContainerManager containerManager, Configuration config) {
+    Preconditions.checkNotNull(containerManager);
+    this.containerManager = containerManager;
+    this.metrics = null;
+    this.conf = config;
+  }
+
+  @Override
+  public void init() {
+    this.metrics = ContainerMetrics.create(conf);
+  }
+
+  @Override
+  public void shutdown() {
+  }
+
+  @Override
+  public ContainerCommandResponseProto dispatch(
+      ContainerCommandRequestProto msg) {
+    LOG.trace("Command {}, trace ID: {} ", msg.getCmdType().toString(),
+        msg.getTraceID());
+    long startNanos = System.nanoTime();
+    ContainerCommandResponseProto resp = null;
+    try {
+      Preconditions.checkNotNull(msg);
+      Type cmdType = msg.getCmdType();
+      metrics.incContainerOpcMetrics(cmdType);
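+      // Route the request by command family: container-level, key, chunk and
+      // small-file commands each have a dedicated handler below.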
+      if ((cmdType == Type.CreateContainer) ||
+          (cmdType == Type.DeleteContainer) ||
+          (cmdType == Type.ReadContainer) ||
+          (cmdType == Type.ListContainer) ||
+          (cmdType == Type.UpdateContainer) ||
+          (cmdType == Type.CloseContainer)) {
+        resp = containerProcessHandler(msg);
+      }
+
+      if ((cmdType == Type.PutKey) ||
+          (cmdType == Type.GetKey) ||
+          (cmdType == Type.DeleteKey) ||
+          (cmdType == Type.ListKey)) {
+        resp = keyProcessHandler(msg);
+      }
+
+      if ((cmdType == Type.WriteChunk) ||
+          (cmdType == Type.ReadChunk) ||
+          (cmdType == Type.DeleteChunk)) {
+        resp = chunkProcessHandler(msg);
+      }
+
+      if ((cmdType == Type.PutSmallFile) ||
+          (cmdType == Type.GetSmallFile)) {
+        resp = smallFileHandler(msg);
+      }
+
+      if (resp != null) {
+        metrics.incContainerOpsLatencies(cmdType,
+            System.nanoTime() - startNanos);
+        return resp;
+      }
+
+      return ContainerUtils.unsupportedRequest(msg);
+    } catch (StorageContainerException e) {
+      // This is useful since the trace ID will allow us to correlate failures.
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IllegalStateException | NullPointerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    }
+  }
+
+  public ContainerMetrics getContainerMetrics() {
+    return metrics;
+  }
+
+  /**
+   * Handles all container-related functionality.
+   *
+   * @param msg - command
+   * @return - response
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto containerProcessHandler(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+    try {
+
+      switch (msg.getCmdType()) {
+      case CreateContainer:
+        return handleCreateContainer(msg);
+
+      case DeleteContainer:
+        return handleDeleteContainer(msg);
+
+      case ListContainer:
+        // TODO : Support List Container.
+        return ContainerUtils.unsupportedRequest(msg);
+
+      case UpdateContainer:
+        return handleUpdateContainer(msg);
+
+      case ReadContainer:
+        return handleReadContainer(msg);
+
+      case CloseContainer:
+        return handleCloseContainer(msg);
+
+      default:
+        return ContainerUtils.unsupportedRequest(msg);
+      }
+    } catch (StorageContainerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IOException ex) {
+      LOG.warn("Container operation failed. " +
+              "Container: {} Operation: {}  trace ID: {} Error: {}",
+          msg.getCreateContainer().getContainerData().getName(),
+          msg.getCmdType().name(),
+          msg.getTraceID(),
+          ex.toString(), ex);
+
+      // TODO : Replace with finer error codes.
+      return ContainerUtils.getContainerResponse(msg,
+          ContainerProtos.Result.CONTAINER_INTERNAL_ERROR,
+          ex.toString()).build();
+    }
+  }
+
+  /**
+   * Handles all key-related functionality.
+   *
+   * @param msg - command
+   * @return - response
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto keyProcessHandler(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+    try {
+      switch (msg.getCmdType()) {
+      case PutKey:
+        return handlePutKey(msg);
+
+      case GetKey:
+        return handleGetKey(msg);
+
+      case DeleteKey:
+        return handleDeleteKey(msg);
+
+      case ListKey:
+        return ContainerUtils.unsupportedRequest(msg);
+
+      default:
+        return ContainerUtils.unsupportedRequest(msg);
+
+      }
+    } catch (StorageContainerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IOException ex) {
+      LOG.warn("Container operation failed. " +
+              "Container: {} Operation: {}  trace ID: {} Error: {}",
+          msg.getCreateContainer().getContainerData().getName(),
+          msg.getCmdType().name(),
+          msg.getTraceID(),
+          ex.toString(), ex);
+
+      // TODO : Replace with finer error codes.
+      return ContainerUtils.getContainerResponse(msg,
+          ContainerProtos.Result.CONTAINER_INTERNAL_ERROR,
+          ex.toString()).build();
+    }
+  }
+
+  /**
+   * Handles all chunk-related functionality.
+   *
+   * @param msg - command
+   * @return - response
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto chunkProcessHandler(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+    try {
+      switch (msg.getCmdType()) {
+      case WriteChunk:
+        return handleWriteChunk(msg);
+
+      case ReadChunk:
+        return handleReadChunk(msg);
+
+      case DeleteChunk:
+        return handleDeleteChunk(msg);
+
+      case ListChunk:
+        return ContainerUtils.unsupportedRequest(msg);
+
+      default:
+        return ContainerUtils.unsupportedRequest(msg);
+      }
+    } catch (StorageContainerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IOException ex) {
+      LOG.warn("Container operation failed. " +
+              "Container: {} Operation: {}  trace ID: {} Error: {}",
+          msg.getCreateContainer().getContainerData().getName(),
+          msg.getCmdType().name(),
+          msg.getTraceID(),
+          ex.toString(), ex);
+
+      // TODO : Replace with finer error codes.
+      return ContainerUtils.getContainerResponse(msg,
+          ContainerProtos.Result.CONTAINER_INTERNAL_ERROR,
+          ex.toString()).build();
+    }
+  }
+
+  /**
+   * Dispatches calls to the small file handler.
+   * @param msg - request
+   * @return response
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto smallFileHandler(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+    switch (msg.getCmdType()) {
+    case PutSmallFile:
+      return handlePutSmallFile(msg);
+    case GetSmallFile:
+      return handleGetSmallFile(msg);
+    default:
+      return ContainerUtils.unsupportedRequest(msg);
+    }
+  }
+
+  /**
+   * Update an existing container with the new container data.
+   *
+   * @param msg Request
+   * @return ContainerCommandResponseProto
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleUpdateContainer(
+      ContainerCommandRequestProto msg)
+      throws IOException {
+    if (!msg.hasUpdateContainer()) {
+      LOG.debug("Malformed read container request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getUpdateContainer().getPipeline());
+    String containerName = msg.getUpdateContainer()
+        .getContainerData().getName();
+
+    ContainerData data = ContainerData.getFromProtBuf(
+        msg.getUpdateContainer().getContainerData(), conf);
+    boolean forceUpdate = msg.getUpdateContainer().getForceUpdate();
+    this.containerManager.updateContainer(
+        pipeline, containerName, data, forceUpdate);
+    return ContainerUtils.getContainerResponse(msg);
+  }
+
+  /**
+   * Calls into container logic and returns appropriate response.
+   *
+   * @param msg - Request
+   * @return ContainerCommandResponseProto
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleReadContainer(
+      ContainerCommandRequestProto msg)
+      throws IOException {
+
+    if (!msg.hasReadContainer()) {
+      LOG.debug("Malformed read container request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+
+    String name = msg.getReadContainer().getName();
+    ContainerData container = this.containerManager.readContainer(name);
+    return ContainerUtils.getReadContainerResponse(msg, container);
+  }
+
+  /**
+   * Calls into container logic and returns appropriate response.
+   *
+   * @param msg - Request
+   * @return Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleDeleteContainer(
+      ContainerCommandRequestProto msg) throws IOException {
+
+    if (!msg.hasDeleteContainer()) {
+      LOG.debug("Malformed delete container request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getDeleteContainer().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+    String name = msg.getDeleteContainer().getName();
+    boolean forceDelete = msg.getDeleteContainer().getForceDelete();
+    this.containerManager.deleteContainer(pipeline, name, forceDelete);
+    return ContainerUtils.getContainerResponse(msg);
+  }
+
+  /**
+   * Calls into container logic and returns appropriate response.
+   *
+   * @param msg - Request
+   * @return Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleCreateContainer(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasCreateContainer()) {
+      LOG.debug("Malformed create container request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    ContainerData cData = ContainerData.getFromProtBuf(
+        msg.getCreateContainer().getContainerData(), conf);
+    Preconditions.checkNotNull(cData, "Container data is null");
+
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getCreateContainer().getPipeline());
+    Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+
+    this.containerManager.createContainer(pipeline, cData);
+    return ContainerUtils.getContainerResponse(msg);
+  }
+
+  /**
+   * Closes an open container.
+   *
+   * @param msg - Request.
+   * @return Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleCloseContainer(
+      ContainerCommandRequestProto msg) throws IOException {
+    try {
+      if (!msg.hasCloseContainer()) {
+        LOG.debug("Malformed close Container request. trace ID: {}",
+            msg.getTraceID());
+        return ContainerUtils.malformedRequest(msg);
+      }
+      Pipeline pipeline = Pipeline.getFromProtoBuf(msg.getCloseContainer()
+          .getPipeline());
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+        throw new StorageContainerException("Attempting to close a closed " +
+            "container.", CLOSED_CONTAINER_IO);
+      }
+      this.containerManager.closeContainer(pipeline.getContainerName());
+      return ContainerUtils.getContainerResponse(msg);
+    } catch (NoSuchAlgorithmException e) {
+      throw new StorageContainerException("No such Algorithm", e,
+          NO_SUCH_ALGORITHM);
+    }
+  }
+
+  /**
+   * Calls into chunk manager to write a chunk.
+   *
+   * @param msg - Request.
+   * @return Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleWriteChunk(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasWriteChunk()) {
+      LOG.debug("Malformed write chunk request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    String keyName = msg.getWriteChunk().getKeyName();
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getWriteChunk().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+    if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+      throw new StorageContainerException("Write to closed container.",
+          CLOSED_CONTAINER_IO);
+    }
+
+    ChunkInfo chunkInfo = ChunkInfo.getFromProtoBuf(msg.getWriteChunk()
+        .getChunkData());
+    Preconditions.checkNotNull(chunkInfo);
+    byte[] data = null;
+    if (msg.getWriteChunk().getStage() == ContainerProtos.Stage.WRITE_DATA
+        || msg.getWriteChunk().getStage() == ContainerProtos.Stage.COMBINED) {
+      data = msg.getWriteChunk().getData().toByteArray();
+      metrics.incContainerBytesStats(Type.WriteChunk, data.length);
+
+    }
+    this.containerManager.getChunkManager()
+        .writeChunk(pipeline, keyName, chunkInfo,
+            data, msg.getWriteChunk().getStage());
+
+    return ChunkUtils.getChunkResponse(msg);
+  }
+
+  /**
+   * Calls into chunk manager to read a chunk.
+   *
+   * @param msg - Request.
+   * @return - Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleReadChunk(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasReadChunk()) {
+      LOG.debug("Malformed read chunk request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+
+    String keyName = msg.getReadChunk().getKeyName();
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getReadChunk().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+
+    ChunkInfo chunkInfo = ChunkInfo.getFromProtoBuf(msg.getReadChunk()
+        .getChunkData());
+    Preconditions.checkNotNull(chunkInfo);
+    byte[] data = this.containerManager.getChunkManager().readChunk(pipeline,
+        keyName, chunkInfo);
+    metrics.incContainerBytesStats(Type.ReadChunk, data.length);
+    return ChunkUtils.getReadChunkResponse(msg, data, chunkInfo);
+  }
+
+  /**
+   * Calls into chunk manager to delete a chunk.
+   *
+   * @param msg - Request.
+   * @return Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleDeleteChunk(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasDeleteChunk()) {
+      LOG.debug("Malformed delete chunk request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+
+    String keyName = msg.getDeleteChunk().getKeyName();
+    Pipeline pipeline = Pipeline.getFromProtoBuf(
+        msg.getDeleteChunk().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+    if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+      throw new StorageContainerException("Write to closed container.",
+          CLOSED_CONTAINER_IO);
+    }
+    ChunkInfo chunkInfo = ChunkInfo.getFromProtoBuf(msg.getDeleteChunk()
+        .getChunkData());
+    Preconditions.checkNotNull(chunkInfo);
+
+    this.containerManager.getChunkManager().deleteChunk(pipeline, keyName,
+        chunkInfo);
+    return ChunkUtils.getChunkResponse(msg);
+  }
+
+  /**
+   * Put Key handler.
+   *
+   * @param msg - Request.
+   * @return - Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handlePutKey(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasPutKey()) {
+      LOG.debug("Malformed put key request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    Pipeline pipeline = Pipeline.getFromProtoBuf(msg.getPutKey().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+    if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+      throw new StorageContainerException("Write to closed container.",
+          CLOSED_CONTAINER_IO);
+    }
+    KeyData keyData = KeyData.getFromProtoBuf(msg.getPutKey().getKeyData());
+    Preconditions.checkNotNull(keyData);
+    this.containerManager.getKeyManager().putKey(pipeline, keyData);
+    long numBytes = keyData.getProtoBufMessage().toByteArray().length;
+    metrics.incContainerBytesStats(Type.PutKey, numBytes);
+    return KeyUtils.getKeyResponse(msg);
+  }
+
+  /**
+   * Handle Get Key.
+   *
+   * @param msg - Request.
+   * @return - Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleGetKey(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasGetKey()) {
+      LOG.debug("Malformed get key request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    KeyData keyData = KeyData.getFromProtoBuf(msg.getGetKey().getKeyData());
+    Preconditions.checkNotNull(keyData);
+    KeyData responseData =
+        this.containerManager.getKeyManager().getKey(keyData);
+    long numBytes = responseData.getProtoBufMessage().toByteArray().length;
+    metrics.incContainerBytesStats(Type.GetKey, numBytes);
+    return KeyUtils.getKeyDataResponse(msg, responseData);
+  }
+
+  /**
+   * Handle Delete Key.
+   *
+   * @param msg - Request.
+   * @return - Response.
+   * @throws IOException
+   */
+  private ContainerCommandResponseProto handleDeleteKey(
+      ContainerCommandRequestProto msg) throws IOException {
+    if (!msg.hasDeleteKey()) {
+      LOG.debug("Malformed delete key request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    Pipeline pipeline =
+        Pipeline.getFromProtoBuf(msg.getDeleteKey().getPipeline());
+    Preconditions.checkNotNull(pipeline);
+    if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+      throw new StorageContainerException("Write to closed container.",
+          CLOSED_CONTAINER_IO);
+    }
+    String keyName = msg.getDeleteKey().getName();
+    Preconditions.checkNotNull(keyName);
+    Preconditions.checkState(!keyName.isEmpty());
+    this.containerManager.getKeyManager().deleteKey(pipeline, keyName);
+    return KeyUtils.getKeyResponse(msg);
+  }
+
+  /**
+   * Handles writing a chunk and associated key using single RPC.
+   *
+   * @param msg - Message.
+   * @return ContainerCommandResponseProto
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto handlePutSmallFile(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+
+    if (!msg.hasPutSmallFile()) {
+      LOG.debug("Malformed put small file request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    try {
+
+      Pipeline pipeline =
+          Pipeline.getFromProtoBuf(msg.getPutSmallFile()
+              .getKey().getPipeline());
+
+      Preconditions.checkNotNull(pipeline);
+      if (!this.containerManager.isOpen(pipeline.getContainerName())) {
+        throw new StorageContainerException("Write to closed container.",
+            CLOSED_CONTAINER_IO);
+      }
+      KeyData keyData = KeyData.getFromProtoBuf(msg.getPutSmallFile().getKey()
+          .getKeyData());
+      ChunkInfo chunkInfo = ChunkInfo.getFromProtoBuf(msg.getPutSmallFile()
+          .getChunkInfo());
+      byte[] data = msg.getPutSmallFile().getData().toByteArray();
+
+      metrics.incContainerBytesStats(Type.PutSmallFile, data.length);
+      this.containerManager.getChunkManager().writeChunk(pipeline, keyData
+          .getKeyName(), chunkInfo, data, ContainerProtos.Stage.COMBINED);
+      List<ContainerProtos.ChunkInfo> chunks = new LinkedList<>();
+      chunks.add(chunkInfo.getProtoBufMessage());
+      keyData.setChunks(chunks);
+      this.containerManager.getKeyManager().putKey(pipeline, keyData);
+      return FileUtils.getPutFileResponse(msg);
+    } catch (StorageContainerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IOException e) {
+      throw new StorageContainerException("Put Small File Failed.", e,
+          PUT_SMALL_FILE_ERROR);
+    }
+  }
+
+  /**
+   * Handles getting a data stream using a key. This helps in reducing the RPC
+   * overhead for small files.
+   *
+   * @param msg - ContainerCommandRequestProto
+   * @return ContainerCommandResponseProto
+   * @throws StorageContainerException
+   */
+  private ContainerCommandResponseProto handleGetSmallFile(
+      ContainerCommandRequestProto msg) throws StorageContainerException {
+    ByteString dataBuf = ByteString.EMPTY;
+    if (!msg.hasGetSmallFile()) {
+      LOG.debug("Malformed get small file request. trace ID: {}",
+          msg.getTraceID());
+      return ContainerUtils.malformedRequest(msg);
+    }
+    try {
+      Pipeline pipeline =
+          Pipeline.getFromProtoBuf(msg.getGetSmallFile()
+              .getKey().getPipeline());
+
+      long bytes = 0;
+      Preconditions.checkNotNull(pipeline);
+      KeyData keyData = KeyData.getFromProtoBuf(msg.getGetSmallFile()
+          .getKey().getKeyData());
+      KeyData data = this.containerManager.getKeyManager().getKey(keyData);
+      ContainerProtos.ChunkInfo c = null;
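+      // Concatenate every chunk of the key into a single buffer; 'c' keeps
+      // the last chunk so its info can be returned with the response.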
+      for (ContainerProtos.ChunkInfo chunk : data.getChunks()) {
+        bytes += chunk.getSerializedSize();
+        ByteString current =
+            ByteString.copyFrom(this.containerManager.getChunkManager()
+                .readChunk(
+                    pipeline, keyData.getKeyName(), ChunkInfo.getFromProtoBuf(
+                        chunk)));
+        dataBuf = dataBuf.concat(current);
+        c = chunk;
+      }
+      metrics.incContainerBytesStats(Type.GetSmallFile, bytes);
+      return FileUtils.getGetSmallFileResponse(msg, dataBuf.toByteArray(),
+          ChunkInfo.getFromProtoBuf(c));
+    } catch (StorageContainerException e) {
+      return ContainerUtils.logAndReturnError(LOG, e, msg);
+    } catch (IOException e) {
+      throw new StorageContainerException("Get Small File Failed", e,
+          GET_SMALL_FILE_ERROR);
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java
new file mode 100644
index 0000000..cf6bf12
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyManagerImpl.java
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.interfaces.KeyManager;
+import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result
+    .NO_SUCH_KEY;
+
+/**
+ * Key Manager impl.
+ */
+public class KeyManagerImpl implements KeyManager {
+  static final Logger LOG =
+      LoggerFactory.getLogger(KeyManagerImpl.class);
+
+  private static final float LOAD_FACTOR = 0.75f;
+  private final ContainerManager containerManager;
+  private final Configuration conf;
+
+  /**
+   * Constructs a key Manager.
+   *
+   * @param containerManager - Container Manager.
+   */
+  public KeyManagerImpl(ContainerManager containerManager, Configuration conf) {
+    Preconditions.checkNotNull(containerManager, "Container manager cannot be" +
+        " null");
+    Preconditions.checkNotNull(conf, "Config cannot be null");
+    this.containerManager = containerManager;
+    this.conf = conf;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void putKey(Pipeline pipeline, KeyData data) throws IOException {
+    containerManager.readLock();
+    try {
+      // We are not locking the key manager since LevelDb serializes all actions
+      // against a single DB. We rely on DB level locking to avoid conflicts.
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      String containerName = pipeline.getContainerName();
+      Preconditions.checkNotNull(containerName,
+          "Container name cannot be null");
+      ContainerData cData = containerManager.readContainer(containerName);
+      MetadataStore db = KeyUtils.getDB(cData, conf);
+
+      // This is a post condition that acts as a hint to the user.
+      // Should never fail.
+      Preconditions.checkNotNull(db, "DB cannot be null here");
+      db.put(data.getKeyName().getBytes(KeyUtils.ENCODING), data
+          .getProtoBufMessage().toByteArray());
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public KeyData getKey(KeyData data) throws IOException {
+    containerManager.readLock();
+    try {
+      Preconditions.checkNotNull(data, "Key data cannot be null");
+      Preconditions.checkNotNull(data.getContainerName(),
+          "Container name cannot be null");
+      ContainerData cData = containerManager.readContainer(data
+          .getContainerName());
+      MetadataStore db = KeyUtils.getDB(cData, conf);
+
+      // This is a post condition that acts as a hint to the user.
+      // Should never fail.
+      Preconditions.checkNotNull(db, "DB cannot be null here");
+
+      byte[] kData = db.get(data.getKeyName().getBytes(KeyUtils.ENCODING));
+      if (kData == null) {
+        throw new StorageContainerException("Unable to find the key.",
+            NO_SUCH_KEY);
+      }
+      ContainerProtos.KeyData keyData =
+          ContainerProtos.KeyData.parseFrom(kData);
+      return KeyData.getFromProtoBuf(keyData);
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void deleteKey(Pipeline pipeline, String keyName)
+      throws IOException {
+    containerManager.readLock();
+    try {
+      Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+      String containerName = pipeline.getContainerName();
+      Preconditions.checkNotNull(containerName,
+          "Container name cannot be null");
+      ContainerData cData = containerManager.readContainer(containerName);
+      MetadataStore db = KeyUtils.getDB(cData, conf);
+
+      // This is a post condition that acts as a hint to the user.
+      // Should never fail.
+      Preconditions.checkNotNull(db, "DB cannot be null here");
+      // Note : There is a race condition here, since get and delete
+      // are not atomic. Leaving it here since the impact is refusing
+      // to delete a key which might have just gotten inserted after
+      // the get check.
+
+      byte[] kData = db.get(keyName.getBytes(KeyUtils.ENCODING));
+      if (kData == null) {
+        throw new StorageContainerException("Unable to find the key.",
+            NO_SUCH_KEY);
+      }
+      db.delete(keyName.getBytes(KeyUtils.ENCODING));
+    } finally {
+      containerManager.readUnlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<KeyData> listKey(
+      Pipeline pipeline, String prefix, String startKey, int count)
+      throws IOException {
+    Preconditions.checkNotNull(pipeline,
+        "Pipeline cannot be null.");
+    Preconditions.checkArgument(count > 0,
+        "Count must be a positive number.");
+    ContainerData cData = containerManager.readContainer(pipeline
+        .getContainerName());
+    MetadataStore db = KeyUtils.getDB(cData, conf);
+
+    List<KeyData> result = new ArrayList<KeyData>();
+    byte[] startKeyInBytes = startKey == null ? null :
+        DFSUtil.string2Bytes(startKey);
+    MetadataKeyFilter prefixFilter = new KeyPrefixFilter(prefix);
+    List<Map.Entry<byte[], byte[]>> range =
+        db.getSequentialRangeKVs(startKeyInBytes, count, prefixFilter);
+    for (Map.Entry<byte[], byte[]> entry : range) {
+      String keyName = KeyUtils.getKeyName(entry.getKey());
+      KeyData value = KeyUtils.getKeyData(entry.getValue());
+      KeyData data = new KeyData(value.getContainerName(), keyName);
+      result.add(data);
+    }
+    return result;
+  }
+
+  /**
+   * Shutdown keyManager.
+   */
+  @Override
+  public void shutdown() {
+    Preconditions.checkState(this.containerManager.hasWriteLock(), "asserts " +
+        "that we are holding the container manager lock when shutting down.");
+    KeyUtils.shutdownCache(ContainerCache.getInstance(conf));
+  }
+}
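
As a usage illustration only (not part of this patch), the manager above can be exercised roughly as follows; the ContainerManager, Configuration, Pipeline and KeyData wiring is assumed to exist elsewhere:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.ozone.container.common.helpers.KeyData;
import org.apache.hadoop.ozone.container.common.impl.KeyManagerImpl;
import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
import org.apache.hadoop.ozone.container.common.interfaces.KeyManager;

public final class KeyManagerSketch {
  // Hypothetical helper; all arguments are assumed to be prepared by the caller.
  static void putGetDelete(ContainerManager containerManager,
      Configuration conf, Pipeline pipeline, KeyData keyData)
      throws IOException {
    KeyManager keyManager = new KeyManagerImpl(containerManager, conf);
    keyManager.putKey(pipeline, keyData);          // persist key metadata
    KeyData stored = keyManager.getKey(keyData);   // read it back
    keyManager.deleteKey(pipeline, stored.getKeyName());
  }
}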
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
new file mode 100644
index 0000000..3e267d2
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerDeletionChoosingPolicy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Randomly chooses containers for block deletion.
+ */
+public class RandomContainerDeletionChoosingPolicy
+    implements ContainerDeletionChoosingPolicy {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RandomContainerDeletionChoosingPolicy.class);
+
+  @Override
+  public List<ContainerData> chooseContainerForBlockDeletion(int count,
+      Map<String, ContainerStatus> candidateContainers)
+      throws StorageContainerException {
+    Preconditions.checkNotNull(candidateContainers,
+        "Internal assertion: candidate containers cannot be null");
+
+    int currentCount = 0;
+    List<ContainerData> result = new LinkedList<>();
+    ContainerStatus[] values = new ContainerStatus[candidateContainers.size()];
+    // to get a shuffled list
+    for (ContainerStatus entry : DFSUtil.shuffle(
+        candidateContainers.values().toArray(values))) {
+      if (currentCount < count) {
+        result.add(entry.getContainer());
+        currentCount++;
+
+        LOG.debug("Select container {} for block deletion, "
+            + "pending deletion blocks num: {}.",
+            entry.getContainer().getContainerName(),
+            entry.getNumPendingDeletionBlocks());
+      } else {
+        break;
+      }
+    }
+
+    return result;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
new file mode 100644
index 0000000..7ef91a9
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+/**
+ * Storage location stats of datanodes that provide the backing store for containers.
+ *
+ */
+public class StorageLocationReport {
+  public static final StorageLocationReport[] EMPTY_ARRAY = {};
+
+  private final String id;
+  private final boolean failed;
+  private final long capacity;
+  private final long scmUsed;
+  private final long remaining;
+
+  public StorageLocationReport(String id, boolean failed,
+      long capacity, long scmUsed, long remaining) {
+    this.id = id;
+    this.failed = failed;
+    this.capacity = capacity;
+    this.scmUsed = scmUsed;
+    this.remaining = remaining;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public boolean isFailed() {
+    return failed;
+  }
+
+  public long getCapacity() {
+    return capacity;
+  }
+
+  public long getScmUsed() {
+    return scmUsed;
+  }
+
+  public long getRemaining() {
+    return remaining;
+  }
+
+}
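
A small sketch (illustration only, not part of this patch) of how a storage directory might be summarized with this value object; the id and byte counts are made up:

long capacity = 1L << 40;                 // 1 TiB volume
long scmUsed = 200L << 30;                // 200 GiB consumed by containers
StorageLocationReport report = new StorageLocationReport(
    "data-dir-1", false, capacity, scmUsed, capacity - scmUsed);
// Consumers only read the immutable fields back through the getters.
double usedPercent = 100.0 * report.getScmUsed() / report.getCapacity();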
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
new file mode 100644
index 0000000..0169a96
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.interfaces
+    .ContainerDeletionChoosingPolicy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * TopN ordered choosing policy that chooses containers based on their number
+ * of pending deletion blocks.
+ */
+public class TopNOrderedContainerDeletionChoosingPolicy
+    implements ContainerDeletionChoosingPolicy {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TopNOrderedContainerDeletionChoosingPolicy.class);
+
+  /** Customized comparator used to order container status by the number of
+   * pending deletion blocks. **/
+  private static final Comparator<ContainerStatus> CONTAINER_STATUS_COMPARATOR
+      = new Comparator<ContainerStatus>() {
+        @Override
+        public int compare(ContainerStatus c1, ContainerStatus c2) {
+          return Integer.compare(c2.getNumPendingDeletionBlocks(),
+              c1.getNumPendingDeletionBlocks());
+        }
+      };
+
+  @Override
+  public List<ContainerData> chooseContainerForBlockDeletion(int count,
+      Map<String, ContainerStatus> candidateContainers)
+      throws StorageContainerException {
+    Preconditions.checkNotNull(candidateContainers,
+        "Internal assertion: candidate containers cannot be null");
+
+    List<ContainerData> result = new LinkedList<>();
+    List<ContainerStatus> orderedList = new LinkedList<>();
+    orderedList.addAll(candidateContainers.values());
+    Collections.sort(orderedList, CONTAINER_STATUS_COMPARATOR);
+
+    // get top N list ordered by pending deletion blocks' number
+    int currentCount = 0;
+    for (ContainerStatus entry : orderedList) {
+      if (currentCount < count) {
+        if (entry.getNumPendingDeletionBlocks() > 0) {
+          result.add(entry.getContainer());
+          currentCount++;
+
+          LOG.debug(
+              "Select container {} for block deletion, "
+                  + "pending deletion blocks num: {}.",
+              entry.getContainer().getContainerName(),
+              entry.getNumPendingDeletionBlocks());
+        } else {
+          LOG.debug("Stop looking for next container, there is no"
+              + " pending deletion block contained in remaining containers.");
+          break;
+        }
+      } else {
+        break;
+      }
+    }
+
+    return result;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java
new file mode 100644
index 0000000..16da5d9
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+/**
+ This package contains the Ozone container implementation.
+**/
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java
new file mode 100644
index 0000000..f55d74c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ChunkManager.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+
+/**
+ * Chunk Manager allows read, write, delete and listing of chunks in
+ * a container.
+ */
+public interface ChunkManager {
+
+  /**
+   * Writes a given chunk.
+   * @param pipeline - Name and the set of machines that make this container.
+   * @param keyName - Name of the Key.
+   * @param info - ChunkInfo.
+   * @param data - the chunk payload to write.
+   * @param stage - stage of the chunk write.
+   * @throws StorageContainerException
+   */
+  void writeChunk(Pipeline pipeline, String keyName,
+                  ChunkInfo info, byte[] data, ContainerProtos.Stage stage)
+      throws StorageContainerException;
+
+  /**
+   * reads the data defined by a chunk.
+   * @param pipeline - container pipeline.
+   * @param keyName - Name of the Key
+   * @param info - ChunkInfo.
+   * @return  byte array
+   * @throws StorageContainerException
+   *
+   * TODO: Right now we do not support partial reads and writes of chunks.
+   * TODO: Explore if we need to do that for ozone.
+   */
+  byte[] readChunk(Pipeline pipeline, String keyName, ChunkInfo info) throws
+      StorageContainerException;
+
+  /**
+   * Deletes a given chunk.
+   * @param pipeline  - Pipeline.
+   * @param keyName   - Key Name
+   * @param info  - Chunk Info
+   * @throws StorageContainerException
+   */
+  void deleteChunk(Pipeline pipeline, String keyName, ChunkInfo info) throws
+      StorageContainerException;
+
+  // TODO : Support list operations.
+
+  /**
+   * Shutdown the chunkManager.
+   */
+  void shutdown();
+
+}
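
A minimal sketch (illustration only, not part of this patch) of the write/read/delete cycle this interface defines; the chunkManager, pipeline, keyName and chunkInfo objects are assumed to be supplied by the caller:

byte[] data = "hello ozone".getBytes(java.nio.charset.StandardCharsets.UTF_8);
// COMBINED performs the write and the commit in a single stage.
chunkManager.writeChunk(pipeline, keyName, chunkInfo, data,
    ContainerProtos.Stage.COMBINED);
byte[] readBack = chunkManager.readChunk(pipeline, keyName, chunkInfo);
chunkManager.deleteChunk(pipeline, keyName, chunkInfo);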
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
new file mode 100644
index 0000000..f7280e2
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.impl.ContainerStatus;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This interface is used for choosing desired containers for
+ * block deletion.
+ */
+public interface ContainerDeletionChoosingPolicy {
+
+  /**
+   * Chooses desired containers for block deletion.
+   * @param count
+   *          how many to return
+   * @param candidateContainers
+   *          candidate containers collection
+   * @return container data list
+   * @throws StorageContainerException
+   */
+  List<ContainerData> chooseContainerForBlockDeletion(int count,
+      Map<String, ContainerStatus> candidateContainers)
+      throws StorageContainerException;
+}
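
Both policy classes earlier in this patch implement this interface, so a caller only chooses which one to instantiate. A sketch (illustration only; candidateContainers is an assumed, pre-populated map):

ContainerDeletionChoosingPolicy policy =
    new TopNOrderedContainerDeletionChoosingPolicy();
    // or: new RandomContainerDeletionChoosingPolicy();
List<ContainerData> picked =
    policy.chooseContainerForBlockDeletion(10, candidateContainers);
// At most 10 containers come back; the TopN policy returns those with the
// most pending deletion blocks, the random policy a shuffled sample.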
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java
new file mode 100644
index 0000000..984fe41
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDispatcher.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+
+/**
+ * Dispatcher acts as the bridge between the transport layer and
+ * the actual container layer. This layer is capable of transforming
+ * protobuf objects into corresponding class and issue the function call
+ * into the lower layers.
+ *
+ * The reply from the request is dispatched to the client.
+ */
+public interface ContainerDispatcher {
+  /**
+   * Dispatches commands to container layer.
+   * @param msg - Command Request
+   * @return Command Response
+   */
+  ContainerCommandResponseProto dispatch(ContainerCommandRequestProto msg);
+
+  /**
+   * Initialize the Dispatcher.
+   */
+  void init();
+
+  /**
+   * Shutdown Dispatcher services.
+   */
+  void shutdown();
+}
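
From the caller's side the contract is narrow: initialize once, dispatch each protobuf request, shut down at the end. A sketch (illustration only; dispatcher and request are assumed to exist):

dispatcher.init();
try {
  ContainerCommandResponseProto response = dispatcher.dispatch(request);
  // Errors such as malformed requests are not thrown; they come back as
  // error responses, as the handlers earlier in this patch show.
} finally {
  dispatcher.shutdown();
}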
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java
new file mode 100644
index 0000000..9c5fcea
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManager.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.ozone.container.common.impl.StorageLocationReport;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+/**
+ * Returns physical path locations, where the containers will be created.
+ */
+public interface ContainerLocationManager {
+  /**
+   * Returns the path where the container should be placed from a set of
+   * locations.
+   *
+   * @return A path where we should place this container and metadata.
+   * @throws IOException
+   */
+  Path getContainerPath() throws IOException;
+
+  /**
+   * Returns the path where the container data files are stored.
+   *
+   * @param containerName - Name of the container.
+   * @return a path where we place the LevelDB and data files of a container.
+   * @throws IOException
+   */
+  Path getDataPath(String containerName) throws IOException;
+
+  /**
+   * Returns an array of storage location usage report.
+   * @return storage location usage report.
+   */
+  StorageLocationReport[] getLocationReport() throws IOException;
+
+  /**
+   * Supports clean shutdown of container.
+   *
+   * @throws IOException
+   */
+  void shutdown() throws IOException;
+}
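
A consumer of this interface mostly asks where to place new containers and polls the usage reports. A sketch (illustration only; locationManager is an assumed implementation and the container name is hypothetical):

Path containerPath = locationManager.getContainerPath();
Path dataPath = locationManager.getDataPath("container-0001");
for (StorageLocationReport report : locationManager.getLocationReport()) {
  System.out.printf("%s: %d of %d bytes used%n",
      report.getId(), report.getScmUsed(), report.getCapacity());
}
locationManager.shutdown();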
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java
new file mode 100644
index 0000000..88e6148
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerLocationManagerMXBean.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.ozone.container.common.impl.StorageLocationReport;
+
+import java.io.IOException;
+
+/**
+ * Returns physical path locations, where the containers will be created.
+ */
+public interface ContainerLocationManagerMXBean {
+
+  /**
+   * Returns an array of storage location usage report.
+   *
+   * @return storage location usage report.
+   */
+  StorageLocationReport[] getLocationReport() throws IOException;
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
new file mode 100644
index 0000000..2ff636e
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
@@ -0,0 +1,280 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdfs.util.RwLock;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+/**
+ * Interface for container operations.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public interface ContainerManager extends RwLock {
+
+  /**
+   * Init call that sets up a container Manager.
+   *
+   * @param config        - Configuration.
+   * @param containerDirs - List of Metadata Container locations.
+   * @param datanodeDetails - DatanodeDetails
+   * @throws StorageContainerException
+   */
+  void init(Configuration config, List<StorageLocation> containerDirs,
+            DatanodeDetails datanodeDetails) throws IOException;
+
+  /**
+   * Creates a container with the given name.
+   *
+   * @param pipeline      -- Nodes which make up this container.
+   * @param containerData - Container Name and metadata.
+   * @throws StorageContainerException
+   */
+  void createContainer(Pipeline pipeline, ContainerData containerData)
+      throws StorageContainerException;
+
+  /**
+   * Deletes an existing container.
+   *
+   * @param pipeline      - nodes that make this container.
+   * @param containerName - name of the container.
+   * @param forceDelete   - whether this container should be deleted forcibly.
+   * @throws StorageContainerException
+   */
+  void deleteContainer(Pipeline pipeline, String containerName,
+      boolean forceDelete) throws StorageContainerException;
+
+  /**
+   * Update an existing container.
+   *
+   * @param pipeline container nodes
+   * @param containerName name of the container
+   * @param data container data
+   * @param forceUpdate if true, update container forcibly.
+   * @throws StorageContainerException
+   */
+  void updateContainer(Pipeline pipeline, String containerName,
+      ContainerData data, boolean forceUpdate) throws StorageContainerException;
+
+  /**
+   * A simple interface for container iterations.
+   *
+   * @param prefix - Return only values matching this prefix
+   * @param count   - how many to return
+   * @param prevKey - Previous key - Server returns results from this point.
+   * @param data    - Actual containerData
+   * @throws StorageContainerException
+   */
+  void listContainer(String prefix, long count, String prevKey,
+                     List<ContainerData> data)
+      throws StorageContainerException;
+
+  /**
+   * Choose containers for block deletion.
+   *
+   * @param count   - how many to return
+   * @throws StorageContainerException
+   */
+  List<ContainerData> chooseContainerForBlockDeletion(int count)
+      throws StorageContainerException;
+
+  /**
+   * Get metadata about a specific container.
+   *
+   * @param containerName - Name of the container
+   * @return ContainerData - Container Data.
+   * @throws StorageContainerException
+   */
+  ContainerData readContainer(String containerName)
+      throws StorageContainerException;
+
+  /**
+   * Closes an open container; if it is already closed or does not exist, a
+   * StorageContainerException is thrown.
+   * @param containerName - Name of the container.
+   * @throws StorageContainerException
+   */
+  void closeContainer(String containerName)
+      throws StorageContainerException, NoSuchAlgorithmException;
+
+  /**
+   * Checks if a container is open.
+   * @param containerName - Name of the container.
+   * @return true if the container is open, false otherwise.
+   * @throws StorageContainerException  - Throws Exception if we are not
+   * able to find the container.
+   */
+  boolean isOpen(String containerName) throws StorageContainerException;
+
+  /**
+   * Supports clean shutdown of container.
+   *
+   * @throws StorageContainerException
+   */
+  void shutdown() throws IOException;
+
+  /**
+   * Sets the Chunk Manager.
+   *
+   * @param chunkManager - ChunkManager.
+   */
+  void setChunkManager(ChunkManager chunkManager);
+
+  /**
+   * Gets the Chunk Manager.
+   *
+   * @return ChunkManager.
+   */
+  ChunkManager getChunkManager();
+
+  /**
+   * Sets the Key Manager.
+   *
+   * @param keyManager - Key Manager.
+   */
+  void setKeyManager(KeyManager keyManager);
+
+  /**
+   * Gets the Key Manager.
+   *
+   * @return KeyManager.
+   */
+  KeyManager getKeyManager();
+
+  /**
+   * Get the Node Report of container storage usage.
+   * @return node report.
+   */
+  SCMNodeReport getNodeReport() throws IOException;
+
+  /**
+   * Gets container report.
+   * @return container report.
+   * @throws IOException
+   */
+  ContainerReportsRequestProto getContainerReport() throws IOException;
+
+  /**
+   * Gets container reports.
+   * @return List of all closed containers.
+   * @throws IOException
+   */
+  List<ContainerData> getContainerReports() throws IOException;
+
+  /**
+   * Increases the pending deletion block count of the specified container.
+   *
+   * @param numBlocks
+   *          number of blocks to add to the count
+   * @param containerId
+   *          container id
+   */
+  void incrPendingDeletionBlocks(int numBlocks, String containerId);
+
+  /**
+   * Decreases the pending deletion block count of the specified container.
+   *
+   * @param numBlocks
+   *          number of blocks to subtract from the count
+   * @param containerId
+   *          container id
+   */
+  void decrPendingDeletionBlocks(int numBlocks, String containerId);
+
+  /**
+   * Increase the read count of the container.
+   * @param containerName - Name of the container.
+   */
+  void incrReadCount(String containerName);
+
+  /**
+   * Increase the read counter for bytes read from the container.
+   * @param containerName - Name of the container.
+   * @param readBytes - bytes read from the container.
+   */
+  void incrReadBytes(String containerName, long readBytes);
+
+
+  /**
+   * Increase the write count of the container.
+   * @param containerName - Name of the container.
+   */
+  void incrWriteCount(String containerName);
+
+  /**
+   * Increase the write counter for bytes written into the container.
+   * @param containerName - Name of the container.
+   * @param writeBytes - bytes written into the container.
+   */
+  void incrWriteBytes(String containerName, long writeBytes);
+
+  /**
+   * Increase the bytes used by the container.
+   * @param containerName - Name of the container.
+   * @param used - additional bytes used by the container.
+   * @return the current bytes used.
+   */
+  long incrBytesUsed(String containerName, long used);
+
+  /**
+   * Decrease the bytes used by the container.
+   * @param containerName - Name of the container.
+   * @param used - additional bytes reclaimed by the container.
+   * @return the current bytes used.
+   */
+  long decrBytesUsed(String containerName, long used);
+
+  /**
+   * Get the bytes used by the container.
+   * @param containerName - Name of the container.
+   * @return the current bytes used by the container.
+   */
+  long getBytesUsed(String containerName);
+
+  /**
+   * Get the number of keys in the container.
+   * @param containerName - Name of the container.
+   * @return the current key count.
+   */
+  long getNumKeys(String containerName);
+
+  /**
+   * Get the container report state to send via HB to SCM.
+   * @return container report state.
+   */
+  ReportState getContainerReportState();
+}
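
The broad lifecycle implied by this interface is init, create, read, close, shutdown. A sketch of that flow (illustration only; config, containerDirs, datanodeDetails, pipeline and containerData are assumed to be prepared by the caller):

containerManager.init(config, containerDirs, datanodeDetails);
containerManager.createContainer(pipeline, containerData);
ContainerData stored =
    containerManager.readContainer(containerData.getContainerName());
if (containerManager.isOpen(stored.getContainerName())) {
  containerManager.closeContainer(stored.getContainerName());
}
containerManager.shutdown();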
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java
new file mode 100644
index 0000000..4689dfe
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerReportManager.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState;
+
+/**
+ * Interface for container report manager operations.
+ */
+public interface ContainerReportManager {
+
+  /**
+   * Get the container report state.
+   * @return the container report state.
+   */
+  ReportState getContainerReportState();
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
new file mode 100644
index 0000000..8c27ba9
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * KeyManager deals with key operations at the container level.
+ */
+public interface KeyManager {
+  /**
+   * Puts or overwrites a key.
+   *
+   * @param pipeline - Pipeline.
+   * @param data     - Key Data.
+   * @throws IOException
+   */
+  void putKey(Pipeline pipeline, KeyData data) throws IOException;
+
+  /**
+   * Gets an existing key.
+   *
+   * @param data - Key Data.
+   * @return Key Data.
+   * @throws IOException
+   */
+  KeyData getKey(KeyData data) throws IOException;
+
+  /**
+   * Deletes an existing Key.
+   *
+   * @param pipeline - Pipeline.
+   * @param keyName  Key Data.
+   * @throws StorageContainerException
+   */
+  void deleteKey(Pipeline pipeline, String keyName)
+      throws IOException;
+
+  /**
+   * List keys in a container.
+   *
+   * @param pipeline - pipeline.
+   * @param prefix   - Prefix if needed.
+   * @param startKey  - Key to start from, EMPTY_STRING to begin.
+   * @param count    - Number of keys to return.
+   * @return List of Keys that match the criteria.
+   */
+  List<KeyData> listKey(Pipeline pipeline, String prefix, String startKey,
+      int count) throws IOException;
+
+  /**
+   * Shutdown keyManager.
+   */
+  void shutdown();
+}
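
listKey is the only listing call here; a sketch of fetching one page of keys under a prefix (illustration only; keyManager and pipeline are assumed, and the prefix value is made up):

List<KeyData> page = keyManager.listKey(pipeline, "bucket1/", "", 100);
for (KeyData key : page) {
  System.out.println(key.getContainerName() + "/" + key.getKeyName());
}
// To continue paging, pass the last returned key name as the next startKey.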
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java
new file mode 100644
index 0000000..d83bf95
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/package-info.java
@@ -0,0 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.interfaces;
+/**
+ This package contains common ozone container interfaces.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java
new file mode 100644
index 0000000..1638a36
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/package-info.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+/**
+  Common Container Layer. At this layer the abstractions are:
+
+ 1. Containers - Both data and metadata containers.
+ 2. Keys - Key/Value pairs that live inside a container.
+ 3. Chunks - Keys can be composed of many chunks.
+
+ Ozone uses these abstractions to build Volumes, Buckets and Keys.
+
+ **/
\ No newline at end of file
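
In code, the composition described above is visible in the small-file write path earlier in this patch: a key's metadata is just the ordered list of chunk descriptors that make up its data. In miniature (illustration only; the surrounding objects are assumed):

chunkManager.writeChunk(pipeline, keyData.getKeyName(), chunkInfo, data,
    ContainerProtos.Stage.COMBINED);
List<ContainerProtos.ChunkInfo> chunks = new LinkedList<>();
chunks.add(chunkInfo.getProtoBufMessage());
keyData.setChunks(chunks);       // the key now records its single chunk
keyManager.putKey(pipeline, keyData);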
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
new file mode 100644
index 0000000..ef1ba59
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
@@ -0,0 +1,397 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.statemachine.commandhandler
+    .CloseContainerHandler;
+import org.apache.hadoop.ozone.container.common.statemachine.commandhandler
+    .CommandDispatcher;
+import org.apache.hadoop.ozone.container.common.statemachine.commandhandler
+    .ContainerReportHandler;
+import org.apache.hadoop.ozone.container.common.statemachine.commandhandler
+    .DeleteBlocksCommandHandler;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval;
+
+/**
+ * State Machine Class.
+ */
+public class DatanodeStateMachine implements Closeable {
+  @VisibleForTesting
+  static final Logger LOG =
+      LoggerFactory.getLogger(DatanodeStateMachine.class);
+  private final ExecutorService executorService;
+  private final Configuration conf;
+  private final SCMConnectionManager connectionManager;
+  private final long heartbeatFrequency;
+  private StateContext context;
+  private final OzoneContainer container;
+  private DatanodeDetails datanodeDetails;
+  private final CommandDispatcher commandDispatcher;
+  private long commandsHandled;
+  private AtomicLong nextHB;
+  private Thread stateMachineThread = null;
+  private Thread cmdProcessThread = null;
+
+  /**
+   * Constructs a datanode state machine.
+   *
+   * @param datanodeDetails - DatanodeDetails used to identify a datanode
+   * @param conf - Configuration.
+   */
+  public DatanodeStateMachine(DatanodeDetails datanodeDetails,
+      Configuration conf) throws IOException {
+    this.conf = conf;
+    this.datanodeDetails = datanodeDetails;
+    executorService = HadoopExecutors.newCachedThreadPool(
+                new ThreadFactoryBuilder().setDaemon(true)
+            .setNameFormat("Datanode State Machine Thread - %d").build());
+    connectionManager = new SCMConnectionManager(conf);
+    context = new StateContext(this.conf, DatanodeStates.getInitState(), this);
+    heartbeatFrequency = TimeUnit.SECONDS.toMillis(
+        getScmHeartbeatInterval(conf));
+    container = new OzoneContainer(this.datanodeDetails,
+        new OzoneConfiguration(conf));
+    nextHB = new AtomicLong(Time.monotonicNow());
+
+     // When we add new handlers just adding a new handler here should do the
+     // trick.
+    commandDispatcher = CommandDispatcher.newBuilder()
+        .addHandler(new ContainerReportHandler())
+        .addHandler(new CloseContainerHandler())
+        .addHandler(new DeleteBlocksCommandHandler(
+            container.getContainerManager(), conf))
+        .setConnectionManager(connectionManager)
+        .setContainer(container)
+        .setContext(context)
+        .build();
+  }
+
+  /**
+   *
+   * Return DatanodeDetails if set, return null otherwise.
+   *
+   * @return DatanodeDetails
+   */
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
+
+  /**
+   * Returns the Connection manager for this state machine.
+   *
+   * @return - SCMConnectionManager.
+   */
+  public SCMConnectionManager getConnectionManager() {
+    return connectionManager;
+  }
+
+  public OzoneContainer getContainer() {
+    return this.container;
+  }
+
+  /**
+   * Runs the state machine at a fixed frequency.
+   */
+  private void start() throws IOException {
+    long now = 0;
+
+    container.start();
+    initCommandHandlerThread(conf);
+    while (context.getState() != DatanodeStates.SHUTDOWN) {
+      try {
+        LOG.debug("Executing cycle Number : {}", context.getExecutionCount());
+        nextHB.set(Time.monotonicNow() + heartbeatFrequency);
+        context.setReportState(container.getNodeReport());
+        context.setContainerReportState(container.getContainerReportState());
+        context.execute(executorService, heartbeatFrequency,
+            TimeUnit.MILLISECONDS);
+        now = Time.monotonicNow();
+        if (now < nextHB.get()) {
+          Thread.sleep(nextHB.get() - now);
+        }
+      } catch (InterruptedException e) {
+        // Ignore this exception.
+      } catch (Exception e) {
+        LOG.error("Unable to finish the execution.", e);
+      }
+    }
+  }
+
+  /**
+   * Gets the current context.
+   *
+   * @return StateContext
+   */
+  public StateContext getContext() {
+    return context;
+  }
+
+  /**
+   * Sets the current context.
+   *
+   * @param context - Context
+   */
+  public void setContext(StateContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated with it. If
+   * the stream is already closed then invoking this method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the close may
+   * fail require careful attention. It is strongly advised to relinquish the
+   * underlying resources and to internally <em>mark</em> the {@code Closeable}
+   * as closed, prior to throwing the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    if (stateMachineThread != null) {
+      stateMachineThread.interrupt();
+    }
+    if (cmdProcessThread != null) {
+      cmdProcessThread.interrupt();
+    }
+    context.setState(DatanodeStates.getLastState());
+    executorService.shutdown();
+    try {
+      if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+        executorService.shutdownNow();
+      }
+
+      if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+        LOG.error("Unable to shutdown state machine properly.");
+      }
+    } catch (InterruptedException e) {
+      LOG.error("Error attempting to shutdown.", e);
+      executorService.shutdownNow();
+      Thread.currentThread().interrupt();
+    }
+
+    if (connectionManager != null) {
+      connectionManager.close();
+    }
+
+    if(container != null) {
+      container.stop();
+    }
+  }
+
+  /**
+   * States that a datanode  can be in. GetNextState will move this enum from
+   * getInitState to getLastState.
+   */
+  public enum DatanodeStates {
+    INIT(1),
+    RUNNING(2),
+    SHUTDOWN(3);
+    private final int value;
+
+    /**
+     * Constructs states.
+     *
+     * @param value  Enum Value
+     */
+    DatanodeStates(int value) {
+      this.value = value;
+    }
+
+    /**
+     * Returns the first State.
+     *
+     * @return First State.
+     */
+    public static DatanodeStates getInitState() {
+      return INIT;
+    }
+
+    /**
+     * The last state of the datanode states.
+     *
+     * @return last state.
+     */
+    public static DatanodeStates getLastState() {
+      return SHUTDOWN;
+    }
+
+    /**
+     * Returns the numeric value associated with the state.
+     *
+     * @return int.
+     */
+    public int getValue() {
+      return value;
+    }
+
+    /**
+     * Returns the next logical state that the datanode should move to. This
+     * function assumes the States are sequentially numbered.
+     *
+     * @return NextState.
+     */
+    public DatanodeStates getNextState() {
+      if (this.value < getLastState().getValue()) {
+        int stateValue = this.getValue() + 1;
+        for (DatanodeStates iter : values()) {
+          if (stateValue == iter.getValue()) {
+            return iter;
+          }
+        }
+      }
+      return getLastState();
+    }
+  }
+
+  /**
+   * Start datanode state machine as a single thread daemon.
+   */
+  public void startDaemon() {
+    Runnable startStateMachineTask = () -> {
+      try {
+        start();
+        LOG.info("Ozone container server started.");
+      } catch (Exception ex) {
+        LOG.error("Unable to start the DatanodeState Machine", ex);
+      }
+    };
+    stateMachineThread =  new ThreadFactoryBuilder()
+        .setDaemon(true)
+        .setNameFormat("Datanode State Machine Thread - %d")
+        .build().newThread(startStateMachineTask);
+    stateMachineThread.start();
+  }
+
+  /**
+   * Waits for DatanodeStateMachine to exit.
+   *
+   * @throws InterruptedException
+   */
+  public void join() throws InterruptedException {
+    stateMachineThread.join();
+    cmdProcessThread.join();
+  }
+
+  /**
+   * Stop the daemon thread of the datanode state machine.
+   */
+  public synchronized void stopDaemon() {
+    try {
+      context.setState(DatanodeStates.SHUTDOWN);
+      this.close();
+      LOG.info("Ozone container server stopped.");
+    } catch (IOException e) {
+      LOG.error("Stop ozone container server failed.", e);
+    }
+  }
+
+  /**
+   * Checks if the datanode state machine daemon is stopped.
+   *
+   * @return True if the datanode state machine daemon is stopped,
+   * false otherwise.
+   */
+  @VisibleForTesting
+  public boolean isDaemonStopped() {
+    return this.executorService.isShutdown()
+        && this.getContext().getExecutionCount() == 0
+        && this.getContext().getState() == DatanodeStates.SHUTDOWN;
+  }
+
+  /**
+   * Creates a command handler thread.
+   *
+   * @param config - Configuration.
+   */
+  private void initCommandHandlerThread(Configuration config) {
+
+    /**
+     * Task that periodically checks if we have any outstanding commands.
+     * It is assumed that commands can be processed slowly and in order.
+     * This assumption might change in the future. Right now, because of this
+     * assumption, we have a single command-queue processing thread.
+     */
+    Runnable processCommandQueue = () -> {
+      long now;
+      while (getContext().getState() != DatanodeStates.SHUTDOWN) {
+        SCMCommand command = getContext().getNextCommand();
+        if (command != null) {
+          commandDispatcher.handle(command);
+          commandsHandled++;
+        } else {
+          try {
+            // Sleep till the next HB + 1 second.
+            now = Time.monotonicNow();
+            if (nextHB.get() > now) {
+              Thread.sleep((nextHB.get() - now) + 1000L);
+            }
+          } catch (InterruptedException e) {
+            // Ignore this exception.
+          }
+        }
+      }
+    };
+
+    // We will have only one thread for command processing in a datanode.
+    cmdProcessThread = getCommandHandlerThread(processCommandQueue);
+    cmdProcessThread.start();
+  }
+
+  private Thread getCommandHandlerThread(Runnable processCommandQueue) {
+    Thread handlerThread = new Thread(processCommandQueue);
+    handlerThread.setDaemon(true);
+    handlerThread.setName("Command processor thread");
+    handlerThread.setUncaughtExceptionHandler((Thread t, Throwable e) -> {
+      // Let us just restart this thread after logging a critical error.
+      // if this thread is not running we cannot handle commands from SCM.
+      LOG.error("Critical Error : Command processor thread encountered an " +
+          "error. Thread: {}", t.toString(), e);
+      getCommandHandlerThread(processCommandQueue).start();
+    });
+    return handlerThread;
+  }
+
+  /**
+   * Returns the number of commands handled by the datanode.
+   * @return count
+   */
+  @VisibleForTesting
+  public long getCommandHandled() {
+    return commandsHandled;
+  }
+}
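+
+/**
+ * A minimal usage sketch of the lifecycle above. It relies only on methods
+ * declared in this class; how the DatanodeStateMachine instance itself is
+ * constructed is left to the caller, since the constructor is not shown in
+ * this hunk, and the ordering below is an assumption about intended use.
+ */
+final class DatanodeStateMachineLifecycleSketch {
+  private DatanodeStateMachineLifecycleSketch() {
+  }
+
+  static void runAndStop(DatanodeStateMachine stateMachine)
+      throws InterruptedException {
+    // Runs start() on a daemon thread; the state machine then cycles
+    // from INIT through RUNNING until it reaches SHUTDOWN.
+    stateMachine.startDaemon();
+
+    // ... the datanode serves traffic here ...
+
+    // Moves the context to SHUTDOWN, releases resources and waits for the
+    // state machine and command processor threads to exit.
+    stateMachine.stopDaemon();
+    stateMachine.join();
+  }
+}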
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java
new file mode 100644
index 0000000..7e85923
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachine.java
@@ -0,0 +1,294 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerDatanodeProtocolClientSideTranslatorPB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.time.ZonedDateTime;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.hadoop.hdds.scm.HddsServerUtil.getLogWarnInterval;
+import static org.apache.hadoop.hdds.scm.HddsServerUtil.getScmHeartbeatInterval;
+
+/**
+ * EndpointStateMachine is a holder class that keeps the state associated
+ * with a single SCM RPC endpoint.
+ */
+public class EndpointStateMachine
+    implements Closeable, EndpointStateMachineMBean {
+  static final Logger
+      LOG = LoggerFactory.getLogger(EndpointStateMachine.class);
+  private final StorageContainerDatanodeProtocolClientSideTranslatorPB endPoint;
+  private final AtomicLong missedCount;
+  private final InetSocketAddress address;
+  private final Lock lock;
+  private final Configuration conf;
+  private EndPointStates state;
+  private VersionResponse version;
+  private ZonedDateTime lastSuccessfulHeartbeat;
+
+  /**
+   * Constructs an RPC endpoint state machine.
+   *
+   * @param address  - Address of the SCM endpoint.
+   * @param endPoint - RPC endPoint client.
+   * @param conf     - Configuration.
+   */
+  public EndpointStateMachine(InetSocketAddress address,
+      StorageContainerDatanodeProtocolClientSideTranslatorPB endPoint,
+      Configuration conf) {
+    this.endPoint = endPoint;
+    this.missedCount = new AtomicLong(0);
+    this.address = address;
+    state = EndPointStates.getInitState();
+    lock = new ReentrantLock();
+    this.conf = conf;
+  }
+
+  /**
+   * Takes a lock on this EndPoint so that other threads don't use this while we
+   * are trying to communicate via this endpoint.
+   */
+  public void lock() {
+    lock.lock();
+  }
+
+  /**
+   * Unlocks this endpoint.
+   */
+  public void unlock() {
+    lock.unlock();
+  }
+
+  /**
+   * Returns the version response that we read from the SCM server.
+   *
+   * @return - Version Response.
+   */
+  public VersionResponse getVersion() {
+    return version;
+  }
+
+  /**
+   * Sets the version response we received from the SCM.
+   *
+   * @param version VersionResponse
+   */
+  public void setVersion(VersionResponse version) {
+    this.version = version;
+  }
+
+  /**
+   * Returns the current state this endpoint is in.
+   *
+   * @return - current state.
+   */
+  public EndPointStates getState() {
+    return state;
+  }
+
+  @Override
+  public int getVersionNumber() {
+    if (version != null) {
+      return version.getProtobufMessage().getSoftwareVersion();
+    } else {
+      return -1;
+    }
+  }
+
+  /**
+   * Sets the endpoint state.
+   *
+   * @param epState - end point state.
+   */
+  public EndPointStates setState(EndPointStates epState) {
+    this.state = epState;
+    return this.state;
+  }
+
+  /**
+   * Closes the connection.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void close() throws IOException {
+    if (endPoint != null) {
+      endPoint.close();
+    }
+  }
+
+  /**
+   * Increments the count of how many times we have missed communicating with
+   * this specific SCM.
+   */
+  public void incMissed() {
+    this.missedCount.incrementAndGet();
+  }
+
+  /**
+   * Returns the value of the missed count.
+   *
+   * @return long
+   */
+  public long getMissedCount() {
+    return this.missedCount.get();
+  }
+
+  @Override
+  public String getAddressString() {
+    return getAddress().toString();
+  }
+
+  public void zeroMissedCount() {
+    this.missedCount.set(0);
+  }
+
+  /**
+   * Returns the InetAddress of the endPoint.
+   *
+   * @return - EndPoint.
+   */
+  public InetSocketAddress getAddress() {
+    return this.address;
+  }
+
+  /**
+   * Returns real RPC endPoint.
+   *
+   * @return rpc client.
+   */
+  public StorageContainerDatanodeProtocolClientSideTranslatorPB
+      getEndPoint() {
+    return endPoint;
+  }
+
+  /**
+   * Returns the string that represents this endpoint.
+   *
+   * @return - String
+   */
+  public String toString() {
+    return address.toString();
+  }
+
+  /**
+   * Logs the exception if needed.
+   *
+   * @param ex - Exception
+   */
+  public void logIfNeeded(Exception ex) {
+    LOG.trace("Incrementing the Missed count. Ex : {}", ex);
+    this.incMissed();
+    if (this.getMissedCount() % getLogWarnInterval(conf) == 0) {
+      LOG.warn("Unable to communicate with SCM server at {}. We have not " +
+              "been able to communicate with this SCM server for the past " +
+              "{} seconds.",
+          this.getAddress().getHostString() + ":" + this.getAddress().getPort(),
+          this.getMissedCount() * getScmHeartbeatInterval(this.conf));
+    }
+  }
+
+
+  /**
+   * States that an Endpoint can be in.
+   * <p>
+   * This is a sorted list of states that EndPoint will traverse.
+   * <p>
+   * GetNextState will move this enum from getInitState to getLastState.
+   */
+  public enum EndPointStates {
+    GETVERSION(1),
+    REGISTER(2),
+    HEARTBEAT(3),
+    SHUTDOWN(4); // if you add value after this please edit getLastState too.
+    private final int value;
+
+    /**
+     * Constructs endPointStates.
+     *
+     * @param value  state.
+     */
+    EndPointStates(int value) {
+      this.value = value;
+    }
+
+    /**
+     * Returns the first State.
+     *
+     * @return First State.
+     */
+    public static EndPointStates getInitState() {
+      return GETVERSION;
+    }
+
+    /**
+     * The last state of endpoint states.
+     *
+     * @return last state.
+     */
+    public static EndPointStates getLastState() {
+      return SHUTDOWN;
+    }
+
+    /**
+     * Returns the numeric value associated with the endpoint state.
+     *
+     * @return int.
+     */
+    public int getValue() {
+      return value;
+    }
+
+    /**
+     * Returns the next logical state that endPoint should move to.
+     * The next state is computed by adding 1 to the current state.
+     *
+     * @return NextState.
+     */
+    public EndPointStates getNextState() {
+      if (this.getValue() < getLastState().getValue()) {
+        int stateValue = this.getValue() + 1;
+        for (EndPointStates iter : values()) {
+          if (stateValue == iter.getValue()) {
+            return iter;
+          }
+        }
+      }
+      return getLastState();
+    }
+  }
+
+  public long getLastSuccessfulHeartbeat() {
+    return lastSuccessfulHeartbeat == null ?
+        0 :
+        lastSuccessfulHeartbeat.toEpochSecond();
+  }
+
+  public void setLastSuccessfulHeartbeat(
+      ZonedDateTime lastSuccessfulHeartbeat) {
+    this.lastSuccessfulHeartbeat = lastSuccessfulHeartbeat;
+  }
+}
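+
+/**
+ * A usage sketch of the locking convention documented above: the caller
+ * holds the endpoint lock for one RPC exchange, advances the endpoint state
+ * on success and records the miss on failure. The actual RPC executed inside
+ * the critical section is left abstract here and is an assumption about how
+ * callers use this class.
+ */
+final class EndpointStateMachineUsageSketch {
+  private EndpointStateMachineUsageSketch() {
+  }
+
+  static void talkToScm(EndpointStateMachine endpoint, Runnable rpcCall) {
+    endpoint.lock();
+    try {
+      // One RPC against endpoint.getEndPoint() would go here.
+      rpcCall.run();
+      // Success: reset the miss counter and move to the next state.
+      endpoint.zeroMissedCount();
+      endpoint.setState(endpoint.getState().getNextState());
+    } catch (Exception ex) {
+      // Failure: count the miss and warn at the configured interval.
+      endpoint.logIfNeeded(ex);
+    } finally {
+      endpoint.unlock();
+    }
+  }
+}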
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java
new file mode 100644
index 0000000..4f64bde
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/EndpointStateMachineMBean.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+
+/**
+ * JMX representation of an EndpointStateMachine.
+ */
+public interface EndpointStateMachineMBean {
+
+  long getMissedCount();
+
+  String getAddressString();
+
+  EndpointStateMachine.EndPointStates getState();
+
+  int getVersionNumber();
+
+  long getLastSuccessfulHeartbeat();
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java
new file mode 100644
index 0000000..19722f0
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManager.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerDatanodeProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import static org.apache.hadoop.hdds.scm.HddsServerUtil
+    .getScmRpcTimeOutInMilliseconds;
+
+/**
+ * SCMConnectionManager - Manages the membership information of the SCMs
+ * that we are working with.
+ */
+public class SCMConnectionManager
+    implements Closeable, SCMConnectionManagerMXBean {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SCMConnectionManager.class);
+
+  private final ReadWriteLock mapLock;
+  private final Map<InetSocketAddress, EndpointStateMachine> scmMachines;
+
+  private final int rpcTimeout;
+  private final Configuration conf;
+  private final ObjectName jmxBean;
+
+  public SCMConnectionManager(Configuration conf) {
+    this.mapLock = new ReentrantReadWriteLock();
+    Long timeOut = getScmRpcTimeOutInMilliseconds(conf);
+    this.rpcTimeout = timeOut.intValue();
+    this.scmMachines = new HashMap<>();
+    this.conf = conf;
+    jmxBean = MBeans.register("OzoneDataNode",
+        "SCMConnectionManager",
+        this);
+  }
+
+
+  /**
+   * Returns Config.
+   *
+   * @return ozoneConfig.
+   */
+  public Configuration getConf() {
+    return conf;
+  }
+
+  /**
+   * Returns the RPC timeout.
+   *
+   * @return - RPC timeout in milliseconds.
+   */
+  public int getRpcTimeout() {
+    return rpcTimeout;
+  }
+
+
+  /**
+   * Takes a read lock.
+   */
+  public void readLock() {
+    this.mapLock.readLock().lock();
+  }
+
+  /**
+   * Releases the read lock.
+   */
+  public void readUnlock() {
+    this.mapLock.readLock().unlock();
+  }
+
+  /**
+   * Takes the write lock.
+   */
+  public void writeLock() {
+    this.mapLock.writeLock().lock();
+  }
+
+  /**
+   * Releases the write lock.
+   */
+  public void writeUnlock() {
+    this.mapLock.writeLock().unlock();
+  }
+
+  /**
+   * Adds a new SCM server to the target set.
+   *
+   * @param address - Address of the SCM server to send heartbeats to.
+   * @throws IOException
+   */
+  public void addSCMServer(InetSocketAddress address) throws IOException {
+    writeLock();
+    try {
+      if (scmMachines.containsKey(address)) {
+        LOG.warn("Trying to add an existing SCM Machine to Machines group. " +
+            "Ignoring the request.");
+        return;
+      }
+      RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
+          ProtobufRpcEngine.class);
+      long version =
+          RPC.getProtocolVersion(StorageContainerDatanodeProtocolPB.class);
+
+      StorageContainerDatanodeProtocolPB rpcProxy = RPC.getProxy(
+          StorageContainerDatanodeProtocolPB.class, version,
+          address, UserGroupInformation.getCurrentUser(), conf,
+          NetUtils.getDefaultSocketFactory(conf), getRpcTimeout());
+
+      StorageContainerDatanodeProtocolClientSideTranslatorPB rpcClient =
+          new StorageContainerDatanodeProtocolClientSideTranslatorPB(rpcProxy);
+
+      EndpointStateMachine endPoint =
+          new EndpointStateMachine(address, rpcClient, conf);
+      scmMachines.put(address, endPoint);
+    } finally {
+      writeUnlock();
+    }
+  }
+
+  /**
+   * Removes an SCM server from the target set.
+   *
+   * @param address - Address of the SCM server to send heartbeats to.
+   * @throws IOException
+   */
+  public void removeSCMServer(InetSocketAddress address) throws IOException {
+    writeLock();
+    try {
+      if (!scmMachines.containsKey(address)) {
+        LOG.warn("Trying to remove a non-existent SCM machine. " +
+            "Ignoring the request.");
+        return;
+      }
+
+      EndpointStateMachine endPoint = scmMachines.get(address);
+      endPoint.close();
+      scmMachines.remove(address);
+    } finally {
+      writeUnlock();
+    }
+  }
+
+  /**
+   * Returns all known RPCEndpoints.
+   *
+   * @return - List of RPC Endpoints.
+   */
+  public Collection<EndpointStateMachine> getValues() {
+    return scmMachines.values();
+  }
+
+  @Override
+  public void close() throws IOException {
+    getValues().forEach(endpointStateMachine
+        -> IOUtils.cleanupWithLogger(LOG, endpointStateMachine));
+    MBeans.unregister(jmxBean);
+  }
+
+  @Override
+  public List<EndpointStateMachineMBean> getSCMServers() {
+    readLock();
+    try {
+      return Collections
+          .unmodifiableList(new ArrayList<>(scmMachines.values()));
+
+    } finally {
+      readUnlock();
+    }
+  }
+}
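+
+/**
+ * A usage sketch for the manager above: register the SCM addresses and then
+ * iterate over the resulting endpoint state machines. Where the addresses
+ * come from is assumed; resolving SCM addresses is not part of this class.
+ */
+final class ScmConnectionManagerUsageSketch {
+  private static final Logger SKETCH_LOG =
+      LoggerFactory.getLogger(ScmConnectionManagerUsageSketch.class);
+
+  private ScmConnectionManagerUsageSketch() {
+  }
+
+  static void registerAndList(SCMConnectionManager manager,
+      Collection<InetSocketAddress> scmAddresses) throws IOException {
+    for (InetSocketAddress address : scmAddresses) {
+      // Builds the RPC proxy and wraps it in an EndpointStateMachine.
+      manager.addSCMServer(address);
+    }
+    // Every registered SCM is now visible here and over JMX.
+    for (EndpointStateMachine endpoint : manager.getValues()) {
+      SKETCH_LOG.info("Registered SCM endpoint: {}",
+          endpoint.getAddressString());
+    }
+  }
+}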
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java
new file mode 100644
index 0000000..25ef163
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/SCMConnectionManagerMXBean.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+import java.util.List;
+
+/**
+ * JMX information about the connected SCM servers.
+ */
+public interface SCMConnectionManagerMXBean {
+
+  List<EndpointStateMachineMBean> getSCMServers();
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java
new file mode 100644
index 0000000..55476fd
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/StateContext.java
@@ -0,0 +1,285 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.ozone.container.common.states.DatanodeState;
+import org.apache.hadoop.ozone.container.common.states.datanode
+    .InitDatanodeState;
+import org.apache.hadoop.ozone.container.common.states.datanode
+    .RunningDatanodeState;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState.states
+    .noContainerReports;
+import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT;
+
+/**
+ * Current Context of State Machine.
+ */
+public class StateContext {
+  static final Logger LOG =
+      LoggerFactory.getLogger(StateContext.class);
+  private final Queue<SCMCommand> commandQueue;
+  private final Lock lock;
+  private final DatanodeStateMachine parent;
+  private final AtomicLong stateExecutionCount;
+  private final Configuration conf;
+  private DatanodeStateMachine.DatanodeStates state;
+  private SCMNodeReport nrState;
+  private ReportState  reportState;
+  private static final ReportState DEFAULT_REPORT_STATE =
+      ReportState.newBuilder().setState(noContainerReports).setCount(0).build();
+
+  /**
+   * Constructs a StateContext.
+   *
+   * @param conf   - Configuration
+   * @param state  - State
+   * @param parent Parent State Machine
+   */
+  public StateContext(Configuration conf, DatanodeStateMachine.DatanodeStates
+      state, DatanodeStateMachine parent) {
+    this.conf = conf;
+    this.state = state;
+    this.parent = parent;
+    commandQueue = new LinkedList<>();
+    lock = new ReentrantLock();
+    stateExecutionCount = new AtomicLong(0);
+    nrState = SCMNodeReport.getDefaultInstance();
+  }
+
+  /**
+   * Returns the DatanodeStateMachine that holds this state.
+   *
+   * @return DatanodeStateMachine.
+   */
+  public DatanodeStateMachine getParent() {
+    return parent;
+  }
+
+  /**
+   * Gets the container server port.
+   * @return The container server port if available, -1 otherwise.
+   */
+  public int getContainerPort() {
+    return parent == null ?
+        INVALID_PORT : parent.getContainer().getContainerServerPort();
+  }
+
+  /**
+   * Gets the Ratis port.
+   * @return The Ratis container server port if available, -1 otherwise.
+   */
+  public int getRatisPort() {
+    return parent == null ?
+        INVALID_PORT : parent.getContainer().getRatisContainerServerPort();
+  }
+
+  /**
+   * Returns true if we are entering a new state.
+   *
+   * @return boolean
+   */
+  boolean isEntering() {
+    return stateExecutionCount.get() == 0;
+  }
+
+  /**
+   * Returns true if we are exiting from the current state.
+   *
+   * @param newState - newState.
+   * @return boolean
+   */
+  boolean isExiting(DatanodeStateMachine.DatanodeStates newState) {
+    boolean isExiting = state != newState && stateExecutionCount.get() > 0;
+    if(isExiting) {
+      stateExecutionCount.set(0);
+    }
+    return isExiting;
+  }
+
+  /**
+   * Returns the current state the machine is in.
+   *
+   * @return state.
+   */
+  public DatanodeStateMachine.DatanodeStates getState() {
+    return state;
+  }
+
+  /**
+   * Sets the current state of the machine.
+   *
+   * @param state state.
+   */
+  public void setState(DatanodeStateMachine.DatanodeStates state) {
+    this.state = state;
+  }
+
+  /**
+   * Returns the node report of the datanode state context.
+   * @return the node report.
+   */
+  public SCMNodeReport getNodeReport() {
+    return nrState;
+  }
+
+  /**
+   * Sets the node report of the datanode state context.
+   * @param nrReport - node report
+   */
+  public void setReportState(SCMNodeReport nrReport) {
+    this.nrState = nrReport;
+  }
+
+  /**
+   * Returns the next task to get executed by the datanode state machine.
+   * @return A callable that will be executed by the
+   * {@link DatanodeStateMachine}
+   */
+  @SuppressWarnings("unchecked")
+  public DatanodeState<DatanodeStateMachine.DatanodeStates> getTask() {
+    switch (this.state) {
+    case INIT:
+      return new InitDatanodeState(this.conf, parent.getConnectionManager(),
+          this);
+    case RUNNING:
+      return new RunningDatanodeState(this.conf, parent.getConnectionManager(),
+          this);
+    case SHUTDOWN:
+      return null;
+    default:
+      throw new IllegalArgumentException("Not Implemented yet.");
+    }
+  }
+
+  /**
+   * Executes the required state function.
+   *
+   * @param service - Executor Service
+   * @param time    - seconds to wait
+   * @param unit    - Seconds.
+   * @throws InterruptedException
+   * @throws ExecutionException
+   * @throws TimeoutException
+   */
+  public void execute(ExecutorService service, long time, TimeUnit unit)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    stateExecutionCount.incrementAndGet();
+    DatanodeState<DatanodeStateMachine.DatanodeStates> task = getTask();
+    if (this.isEntering()) {
+      task.onEnter();
+    }
+    task.execute(service);
+    DatanodeStateMachine.DatanodeStates newState = task.await(time, unit);
+    if (this.state != newState) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Task {} executed, state transited from {} to {}",
+            task.getClass().getSimpleName(), this.state, newState);
+      }
+      if (isExiting(newState)) {
+        task.onExit();
+      }
+      this.clearReportState();
+      this.setState(newState);
+    }
+  }
+
+  /**
+   * Returns the next command or null if it is empty.
+   *
+   * @return SCMCommand or Null.
+   */
+  public SCMCommand getNextCommand() {
+    lock.lock();
+    try {
+      return commandQueue.poll();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Adds a command to the State Machine queue.
+   *
+   * @param command - SCMCommand.
+   */
+  public void addCommand(SCMCommand command) {
+    lock.lock();
+    try {
+      commandQueue.add(command);
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Returns the state execution count.
+   * @return long
+   */
+  public long getExecutionCount() {
+    return stateExecutionCount.get();
+  }
+
+
+  /**
+   * Gets the ReportState.
+   * @return ReportState.
+   */
+  public synchronized ReportState getContainerReportState() {
+    if (reportState == null) {
+      return DEFAULT_REPORT_STATE;
+    }
+    return reportState;
+  }
+
+  /**
+   * Sets the ReportState.
+   * @param rState - ReportState.
+   */
+  public synchronized void setContainerReportState(ReportState rState) {
+    this.reportState = rState;
+  }
+
+  /**
+   * Clears report state after it has been communicated.
+   */
+  public synchronized void clearReportState() {
+    if(reportState != null) {
+      setContainerReportState(null);
+    }
+  }
+
+}
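+
+/**
+ * A usage sketch of how a driving loop can use the context above: it keeps
+ * executing the task for the current state until SHUTDOWN is reached. The
+ * executor and per-state timeout are supplied by the caller; the real state
+ * machine's values are not shown here and are assumptions.
+ */
+final class StateContextUsageSketch {
+  private StateContextUsageSketch() {
+  }
+
+  static void driveToShutdown(StateContext context, ExecutorService executor,
+      long taskTimeoutSeconds)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    while (context.getState()
+        != DatanodeStateMachine.DatanodeStates.SHUTDOWN) {
+      // Runs the DatanodeState for the current state, waits for it, and
+      // applies the resulting state transition.
+      context.execute(executor, taskTimeoutSeconds, TimeUnit.SECONDS);
+    }
+  }
+}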
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java
new file mode 100644
index 0000000..ac95b2a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/BlockDeletingService.java
@@ -0,0 +1,239 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.statemachine.background;
+
+import com.google.common.collect.Lists;
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.BackgroundTask;
+import org.apache.hadoop.utils.BackgroundTaskQueue;
+import org.apache.hadoop.utils.BackgroundTaskResult;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER_DEFAULT;
+
+/**
+ * A per-datanode container block deleting service that takes charge of
+ * deleting stale ozone blocks.
+ */
+public class BlockDeletingService extends BackgroundService {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BlockDeletingService.class);
+
+  private final ContainerManager containerManager;
+  private final Configuration conf;
+
+  // Throttles the number of blocks to delete per task;
+  // set to 1 for testing.
+  private final int blockLimitPerTask;
+
+  // Throttles the number of containers to process concurrently per interval.
+  private final int containerLimitPerInterval;
+
+  // Task priority is useful when a to-delete block has weight.
+  private static final int TASK_PRIORITY_DEFAULT = 1;
+  // Core pool size for container tasks.
+  private static final int BLOCK_DELETING_SERVICE_CORE_POOL_SIZE = 10;
+
+  public BlockDeletingService(ContainerManager containerManager,
+      long serviceInterval, long serviceTimeout, Configuration conf) {
+    super("BlockDeletingService", serviceInterval,
+        TimeUnit.MILLISECONDS, BLOCK_DELETING_SERVICE_CORE_POOL_SIZE,
+        serviceTimeout);
+    this.containerManager = containerManager;
+    this.conf = conf;
+    this.blockLimitPerTask = conf.getInt(
+        OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER,
+        OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER_DEFAULT);
+    this.containerLimitPerInterval = conf.getInt(
+        OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL,
+        OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT);
+  }
+
+  @Override
+  public BackgroundTaskQueue getTasks() {
+    BackgroundTaskQueue queue = new BackgroundTaskQueue();
+    List<ContainerData> containers = Lists.newArrayList();
+    try {
+      // We list at most a fixed number of containers at a time, in case
+      // there are too many containers and we would start too many workers.
+      // We must ensure there is no empty container in this result.
+      // The chosen result depends on what container deletion policy is
+      // configured.
+      containers = containerManager.chooseContainerForBlockDeletion(
+          containerLimitPerInterval);
+      LOG.info("Plan to choose {} containers for block deletion, "
+          + "actually returns {} valid containers.",
+          containerLimitPerInterval, containers.size());
+
+      for(ContainerData container : containers) {
+        BlockDeletingTask containerTask =
+            new BlockDeletingTask(container, TASK_PRIORITY_DEFAULT);
+        queue.add(containerTask);
+      }
+    } catch (StorageContainerException e) {
+      LOG.warn("Failed to initiate block deleting tasks, "
+          + "caused by unable to get containers info. "
+          + "Retry in next interval. ", e);
+    } catch (Exception e) {
+      // In case listContainer call throws any uncaught RuntimeException.
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Unexpected error occurs during deleting blocks.", e);
+      }
+    }
+    return queue;
+  }
+
+  private static class ContainerBackgroundTaskResult
+      implements BackgroundTaskResult {
+    private List<String> deletedBlockIds;
+
+    ContainerBackgroundTaskResult() {
+      deletedBlockIds = new LinkedList<>();
+    }
+
+    public void addBlockId(String blockId) {
+      deletedBlockIds.add(blockId);
+    }
+
+    public void addAll(List<String> blockIds) {
+      deletedBlockIds.addAll(blockIds);
+    }
+
+    public List<String> getDeletedBlocks() {
+      return deletedBlockIds;
+    }
+
+    @Override
+    public int getSize() {
+      return deletedBlockIds.size();
+    }
+  }
+
+  private class BlockDeletingTask
+      implements BackgroundTask<BackgroundTaskResult> {
+
+    private final int priority;
+    private final ContainerData containerData;
+
+    BlockDeletingTask(ContainerData containerName, int priority) {
+      this.priority = priority;
+      this.containerData = containerName;
+    }
+
+    @Override
+    public BackgroundTaskResult call() throws Exception {
+      ContainerBackgroundTaskResult crr = new ContainerBackgroundTaskResult();
+      long startTime = Time.monotonicNow();
+      // Scan container's db and get list of under deletion blocks
+      MetadataStore meta = KeyUtils.getDB(containerData, conf);
+      // # of blocks to delete is throttled
+      KeyPrefixFilter filter = new KeyPrefixFilter(
+          OzoneConsts.DELETING_KEY_PREFIX);
+      List<Map.Entry<byte[], byte[]>> toDeleteBlocks =
+          meta.getSequentialRangeKVs(null, blockLimitPerTask, filter);
+      if (toDeleteBlocks.isEmpty()) {
+        LOG.debug("No under deletion block found in container : {}",
+            containerData.getContainerName());
+      }
+
+      List<String> succeedBlocks = new LinkedList<>();
+      LOG.debug("Container : {}, To-Delete blocks : {}",
+          containerData.getContainerName(), toDeleteBlocks.size());
+      File dataDir = ContainerUtils.getDataDirectory(containerData).toFile();
+      if (!dataDir.exists() || !dataDir.isDirectory()) {
+        LOG.error("Invalid container data dir {} : "
+            + "not exist or not a directory", dataDir.getAbsolutePath());
+        return crr;
+      }
+
+      toDeleteBlocks.forEach(entry -> {
+        String blockName = DFSUtil.bytes2String(entry.getKey());
+        LOG.debug("Deleting block {}", blockName);
+        try {
+          ContainerProtos.KeyData data =
+              ContainerProtos.KeyData.parseFrom(entry.getValue());
+          for (ContainerProtos.ChunkInfo chunkInfo : data.getChunksList()) {
+            File chunkFile = dataDir.toPath()
+                .resolve(chunkInfo.getChunkName()).toFile();
+            if (FileUtils.deleteQuietly(chunkFile)) {
+              LOG.debug("block {} chunk {} deleted", blockName,
+                  chunkFile.getAbsolutePath());
+            }
+          }
+          succeedBlocks.add(blockName);
+        } catch (InvalidProtocolBufferException e) {
+          LOG.error("Failed to parse block info for block {}", blockName, e);
+        }
+      });
+
+      // Once files are deleted ... clean up DB
+      BatchOperation batch = new BatchOperation();
+      succeedBlocks.forEach(entry ->
+          batch.delete(DFSUtil.string2Bytes(entry)));
+      meta.writeBatch(batch);
+      // update count of pending deletion blocks in in-memory container status
+      containerManager.decrPendingDeletionBlocks(succeedBlocks.size(),
+          containerData.getContainerName());
+
+      if (!succeedBlocks.isEmpty()) {
+        LOG.info("Container: {}, deleted blocks: {}, task elapsed time: {}ms",
+            containerData.getContainerName(), succeedBlocks.size(),
+            Time.monotonicNow() - startTime);
+      }
+      crr.addAll(succeedBlocks);
+      return crr;
+    }
+
+    @Override
+    public int getPriority() {
+      return priority;
+    }
+  }
+}
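+
+/**
+ * A construction sketch for the service above: the two throttle settings are
+ * plain configuration keys, while the scan interval and per-task timeout go
+ * to the BackgroundService base class. The numeric values below are
+ * illustrative assumptions, not defaults taken from the code.
+ */
+final class BlockDeletingServiceConfigSketch {
+  private BlockDeletingServiceConfigSketch() {
+  }
+
+  static BlockDeletingService newThrottledService(
+      ContainerManager containerManager, Configuration conf) {
+    // Delete at most 64 blocks per container task and pick at most
+    // 10 containers per interval (illustrative values).
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 64);
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);
+    long serviceIntervalMs = TimeUnit.MINUTES.toMillis(1);
+    long serviceTimeoutMs = TimeUnit.SECONDS.toMillis(300);
+    return new BlockDeletingService(containerManager, serviceIntervalMs,
+        serviceTimeoutMs, conf);
+  }
+}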
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java
new file mode 100644
index 0000000..a9e202e
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/background/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.background;
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java
new file mode 100644
index 0000000..f7b49b7
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerHandler.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Handler for the SCM close container command.
+ */
+public class CloseContainerHandler implements CommandHandler {
+  static final Logger LOG =
+      LoggerFactory.getLogger(CloseContainerHandler.class);
+  private int invocationCount;
+  private long totalTime;
+
+  /**
+   * Constructs a CloseContainerHandler.
+   */
+  public CloseContainerHandler() {
+  }
+
+  /**
+   * Handles a given SCM command.
+   *
+   * @param command           - SCM Command
+   * @param container         - Ozone Container.
+   * @param context           - Current Context.
+   * @param connectionManager - The SCMs that we are talking to.
+   */
+  @Override
+  public void handle(SCMCommand command, OzoneContainer container,
+      StateContext context, SCMConnectionManager connectionManager) {
+    LOG.debug("Processing Close Container command.");
+    invocationCount++;
+    long startTime = Time.monotonicNow();
+    String containerName = "UNKNOWN";
+    try {
+
+      SCMCloseContainerCmdResponseProto
+          closeContainerProto =
+          SCMCloseContainerCmdResponseProto
+              .parseFrom(command.getProtoBufMessage());
+      containerName = closeContainerProto.getContainerName();
+
+      container.getContainerManager().closeContainer(containerName);
+
+    } catch (Exception e) {
+      LOG.error("Can't close container " + containerName, e);
+    } finally {
+      long endTime = Time.monotonicNow();
+      totalTime += endTime - startTime;
+    }
+  }
+
+  /**
+   * Returns the command type that this command handler handles.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getCommandType() {
+    return SCMCmdType.closeContainerCommand;
+  }
+
+  /**
+   * Returns number of times this handler has been invoked.
+   *
+   * @return int
+   */
+  @Override
+  public int getInvocationCount() {
+    return invocationCount;
+  }
+
+  /**
+   * Returns the average time this function takes to run.
+   *
+   * @return long
+   */
+  @Override
+  public long getAverageRunTime() {
+    if (invocationCount > 0) {
+      return totalTime / invocationCount;
+    }
+    return 0;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java
new file mode 100644
index 0000000..40feca3
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandDispatcher.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Dispatches command to the correct handler.
+ */
+public final class CommandDispatcher {
+  static final Logger LOG =
+      LoggerFactory.getLogger(CommandDispatcher.class);
+  private final StateContext context;
+  private final Map<SCMCmdType, CommandHandler> handlerMap;
+  private final OzoneContainer container;
+  private final SCMConnectionManager connectionManager;
+
+  /**
+   * Constructs a command dispatcher.
+   *
+   * @param container - Ozone Container
+   * @param connectionManager - SCM connection manager
+   * @param context - Context
+   * @param handlers - Set of handlers.
+   */
+  private CommandDispatcher(OzoneContainer container, SCMConnectionManager
+      connectionManager, StateContext context,
+      CommandHandler... handlers) {
+    Preconditions.checkNotNull(context);
+    Preconditions.checkNotNull(handlers);
+    Preconditions.checkArgument(handlers.length > 0);
+    Preconditions.checkNotNull(container);
+    Preconditions.checkNotNull(connectionManager);
+    this.context = context;
+    this.container = container;
+    this.connectionManager = connectionManager;
+    handlerMap = new HashMap<>();
+    for (CommandHandler h : handlers) {
+      if(handlerMap.containsKey(h.getCommandType())){
+        LOG.error("Duplicate handler for the same command. Exiting. Handle " +
+            "key : { }", h.getCommandType().getDescriptorForType().getName());
+        throw new IllegalArgumentException("Duplicate handler for the same " +
+            "command.");
+      }
+      handlerMap.put(h.getCommandType(), h);
+    }
+  }
+
+  /**
+   * Dispatch the command to the correct handler.
+   *
+   * @param command - SCM Command.
+   */
+  public void handle(SCMCommand command) {
+    Preconditions.checkNotNull(command);
+    CommandHandler handler = handlerMap.get(command.getType());
+    if (handler != null) {
+      handler.handle(command, container, context, connectionManager);
+    } else {
+      LOG.error("Unknown SCM Command queued. There is no handler for this " +
+          "command. Command: {}", command.getType().getDescriptorForType()
+          .getName());
+    }
+  }
+
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Helper class to construct command dispatcher.
+   */
+  public static class Builder {
+    private final List<CommandHandler> handlerList;
+    private OzoneContainer container;
+    private StateContext context;
+    private SCMConnectionManager connectionManager;
+
+    public Builder() {
+      handlerList = new LinkedList<>();
+    }
+
+    /**
+     * Adds a handler.
+     *
+     * @param handler - handler
+     * @return Builder
+     */
+    public Builder addHandler(CommandHandler handler) {
+      Preconditions.checkNotNull(handler);
+      handlerList.add(handler);
+      return this;
+    }
+
+    /**
+     * Add the OzoneContainer.
+     *
+     * @param ozoneContainer - ozone container.
+     * @return Builder
+     */
+    public Builder setContainer(OzoneContainer ozoneContainer) {
+      Preconditions.checkNotNull(ozoneContainer);
+      this.container = ozoneContainer;
+      return this;
+    }
+
+    /**
+     * Sets the Connection Manager.
+     *
+     * @param scmConnectionManager - SCM connection manager.
+     * @return this
+     */
+    public Builder setConnectionManager(SCMConnectionManager
+        scmConnectionManager) {
+      Preconditions.checkNotNull(scmConnectionManager);
+      this.connectionManager = scmConnectionManager;
+      return this;
+    }
+
+    /**
+     * Sets the Context.
+     *
+     * @param stateContext - StateContext
+     * @return this
+     */
+    public Builder setContext(StateContext stateContext) {
+      Preconditions.checkNotNull(stateContext);
+      this.context = stateContext;
+      return this;
+    }
+
+    /**
+     * Builds a command Dispatcher.
+     * @return Command Dispatcher.
+     */
+    public CommandDispatcher build() {
+      Preconditions.checkNotNull(this.connectionManager, "Missing connection" +
+          " manager.");
+      Preconditions.checkNotNull(this.container, "Missing container.");
+      Preconditions.checkNotNull(this.context, "Missing context.");
+      Preconditions.checkArgument(this.handlerList.size() > 0);
+      return new CommandDispatcher(this.container, this.connectionManager,
+          this.context, handlerList.toArray(
+              new CommandHandler[handlerList.size()]));
+    }
+  }
+}
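+
+/**
+ * A wiring sketch for the dispatcher above: one handler is registered per
+ * SCMCmdType through the builder, and every queued command is then routed
+ * through handle(). The container, context and connection manager instances
+ * are assumed to be supplied by the surrounding datanode state machine.
+ */
+final class CommandDispatcherWiringSketch {
+  private CommandDispatcherWiringSketch() {
+  }
+
+  static CommandDispatcher wire(OzoneContainer container, StateContext context,
+      SCMConnectionManager connectionManager) {
+    return CommandDispatcher.newBuilder()
+        .addHandler(new ContainerReportHandler())
+        .addHandler(new CloseContainerHandler())
+        .setContainer(container)
+        .setContext(context)
+        .setConnectionManager(connectionManager)
+        .build();
+  }
+
+  static void route(CommandDispatcher dispatcher, SCMCommand command) {
+    // Looks up the handler registered for command.getType() and invokes it.
+    dispatcher.handle(command);
+  }
+}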
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java
new file mode 100644
index 0000000..13d9f72
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CommandHandler.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+
+/**
+ * Generic interface for handlers.
+ */
+public interface CommandHandler {
+
+  /**
+   * Handles a given SCM command.
+   * @param command - SCM Command
+   * @param container - Ozone Container.
+   * @param context - Current Context.
+   * @param connectionManager - The SCMs that we are talking to.
+   */
+  void handle(SCMCommand command, OzoneContainer container,
+      StateContext context, SCMConnectionManager connectionManager);
+
+  /**
+   * Returns the command type that this command handler handles.
+   * @return Type
+   */
+  SCMCmdType getCommandType();
+
+  /**
+   * Returns number of times this handler has been invoked.
+   * @return int
+   */
+  int getInvocationCount();
+
+  /**
+   * Returns the average time this function takes to run.
+   * @return  long
+   */
+  long getAverageRunTime();
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java
new file mode 100644
index 0000000..ba6b418
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/ContainerReportHandler.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * Container Report handler.
+ */
+public class ContainerReportHandler implements CommandHandler {
+  static final Logger LOG =
+      LoggerFactory.getLogger(ContainerReportHandler.class);
+  private int invocationCount;
+  private long totalTime;
+
+  /**
+   * Constructs a ContainerReport handler.
+   */
+  public ContainerReportHandler() {
+  }
+
+  /**
+   * Handles a given SCM command.
+   *
+   * @param command - SCM Command
+   * @param container - Ozone Container.
+   * @param context - Current Context.
+   * @param connectionManager - The SCMs that we are talking to.
+   */
+  @Override
+  public void handle(SCMCommand command, OzoneContainer container,
+      StateContext context, SCMConnectionManager connectionManager) {
+    LOG.debug("Processing Container Report.");
+    invocationCount++;
+    long startTime = Time.monotonicNow();
+    try {
+      ContainerReportsRequestProto containerReport =
+          container.getContainerReport();
+
+      // TODO : We send this report to all SCMs. Check if it is enough only to
+      // send to the leader once we have RAFT enabled SCMs.
+      for (EndpointStateMachine endPoint : connectionManager.getValues()) {
+        endPoint.getEndPoint().sendContainerReport(containerReport);
+      }
+    } catch (IOException ex) {
+      LOG.error("Unable to process the Container Report command.", ex);
+    } finally {
+      long endTime = Time.monotonicNow();
+      totalTime += endTime - startTime;
+    }
+  }
+
+  /**
+   * Returns the command type that this command handler handles.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getCommandType() {
+    return SCMCmdType.sendContainerReport;
+  }
+
+  /**
+   * Returns number of times this handler has been invoked.
+   *
+   * @return int
+   */
+  @Override
+  public int getInvocationCount() {
+    return invocationCount;
+  }
+
+  /**
+   * Returns the average time this function takes to run.
+   *
+   * @return long
+   */
+  @Override
+  public long getAverageRunTime() {
+    if (invocationCount > 0) {
+      return totalTime / invocationCount;
+    }
+    return 0;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java
new file mode 100644
index 0000000..f106e3d
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
+    .DeleteBlockTransactionResult;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers
+    .DeletedContainerBlocksSummary;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Handle block deletion commands.
+ */
+public class DeleteBlocksCommandHandler implements CommandHandler {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DeleteBlocksCommandHandler.class);
+
+  private ContainerManager containerManager;
+  private Configuration conf;
+  private int invocationCount;
+  private long totalTime;
+
+  public DeleteBlocksCommandHandler(ContainerManager containerManager,
+      Configuration conf) {
+    this.containerManager = containerManager;
+    this.conf = conf;
+  }
+
+  @Override
+  public void handle(SCMCommand command, OzoneContainer container,
+      StateContext context, SCMConnectionManager connectionManager) {
+    if (command.getType() != SCMCmdType.deleteBlocksCommand) {
+      LOG.warn("Skipping handling command, expected command "
+              + "type {} but found {}",
+          SCMCmdType.deleteBlocksCommand, command.getType());
+      return;
+    }
+    LOG.debug("Processing block deletion command.");
+    invocationCount++;
+    long startTime = Time.monotonicNow();
+
+    // Move blocks to the deleting state.
+    // This is a metadata update; the actual deletion happens in a separate
+    // recycling thread.
+    DeleteBlocksCommand cmd = (DeleteBlocksCommand) command;
+    List<DeletedBlocksTransaction> containerBlocks = cmd.blocksTobeDeleted();
+
+
+    DeletedContainerBlocksSummary summary =
+        DeletedContainerBlocksSummary.getFrom(containerBlocks);
+    LOG.info("Start to delete container blocks, TXIDs={}, "
+            + "numOfContainers={}, numOfBlocks={}",
+        summary.getTxIDSummary(),
+        summary.getNumOfContainers(),
+        summary.getNumOfBlocks());
+
+    ContainerBlocksDeletionACKProto.Builder resultBuilder =
+        ContainerBlocksDeletionACKProto.newBuilder();
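+    // Build one ACK entry per transaction; a failure in one transaction is
+    // recorded in its result and does not abort the remaining transactions.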
+    containerBlocks.forEach(entry -> {
+      DeleteBlockTransactionResult.Builder txResultBuilder =
+          DeleteBlockTransactionResult.newBuilder();
+      txResultBuilder.setTxID(entry.getTxID());
+      try {
+        deleteContainerBlocks(entry, conf);
+        txResultBuilder.setSuccess(true);
+      } catch (IOException e) {
+        LOG.warn("Failed to delete blocks for container={}, TXID={}",
+            entry.getContainerName(), entry.getTxID(), e);
+        txResultBuilder.setSuccess(false);
+      }
+      resultBuilder.addResults(txResultBuilder.build());
+    });
+    ContainerBlocksDeletionACKProto blockDeletionACK = resultBuilder.build();
+
+    // Send ACK back to SCM as long as meta updated
+    // TODO Or we should wait until the blocks are actually deleted?
+    if (!containerBlocks.isEmpty()) {
+      for (EndpointStateMachine endPoint : connectionManager.getValues()) {
+        try {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Sending following block deletion ACK to SCM");
+            for (DeleteBlockTransactionResult result :
+                blockDeletionACK.getResultsList()) {
+              LOG.debug(result.getTxID() + " : " + result.getSuccess());
+            }
+          }
+          endPoint.getEndPoint()
+              .sendContainerBlocksDeletionACK(blockDeletionACK);
+        } catch (IOException e) {
+          LOG.error("Unable to send block deletion ACK to SCM {}",
+              endPoint.getAddress().toString(), e);
+        }
+      }
+    }
+
+    long endTime = Time.monotonicNow();
+    totalTime += endTime - startTime;
+  }
+
+  /**
+   * Move a set of blocks in a container to the deleting state.
+   * This is a metadata update; the actual deletes happen asynchronously.
+   *
+   * @param delTX a block deletion transaction.
+   * @param config configuration.
+   * @throws IOException if I/O error occurs.
+   */
+  private void deleteContainerBlocks(DeletedBlocksTransaction delTX,
+      Configuration config) throws IOException {
+    String containerId = delTX.getContainerName();
+    ContainerData containerInfo = containerManager.readContainer(containerId);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Processing Container : {}, DB path : {}", containerId,
+          containerInfo.getDBPath());
+    }
+
+    int newDeletionBlocks = 0;
+    MetadataStore containerDB = KeyUtils.getDB(containerInfo, config);
+    for (String blk : delTX.getBlockIDList()) {
+      BatchOperation batch = new BatchOperation();
+      byte[] blkBytes = DFSUtil.string2Bytes(blk);
+      byte[] blkInfo = containerDB.get(blkBytes);
+      if (blkInfo != null) {
+        // Found the block in container db,
+        // use an atomic update to change its state to deleting.
+        batch.put(DFSUtil.string2Bytes(OzoneConsts.DELETING_KEY_PREFIX + blk),
+            blkInfo);
+        batch.delete(blkBytes);
+        try {
+          containerDB.writeBatch(batch);
+          newDeletionBlocks++;
+          LOG.debug("Transited Block {} to DELETING state in container {}",
+              blk, containerId);
+        } catch (IOException e) {
+          // If the batch write fails, we fail this TX; it will not be
+          // acknowledged as successful, and SCM will resend the TX with a
+          // certain number of retries.
+          throw new IOException(
+              "Failed to delete blocks for TXID = " + delTX.getTxID(), e);
+        }
+      } else {
+        LOG.debug("Block {} not found or already under deletion in"
+                + " container {}, skip deleting it.", blk, containerId);
+      }
+    }
+
+    // update pending deletion blocks count in in-memory container status
+    containerManager.incrPendingDeletionBlocks(newDeletionBlocks, containerId);
+  }
+
+  @Override
+  public SCMCmdType getCommandType() {
+    return SCMCmdType.deleteBlocksCommand;
+  }
+
+  @Override
+  public int getInvocationCount() {
+    return this.invocationCount;
+  }
+
+  @Override
+  public long getAverageRunTime() {
+    if (invocationCount > 0) {
+      return totalTime / invocationCount;
+    }
+    return 0;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java
new file mode 100644
index 0000000..1e9c8dc
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java
new file mode 100644
index 0000000..feb2f81
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/package-info.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine;
+/**
+
+ The state machine classes are used by the container to track the various
+ states a container can be in and to drive command processing.
+
+ A container moves through the following states:
+
+ Start -> getVersion -> Register -> Running -> Shutdown
+
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java
new file mode 100644
index 0000000..75142af
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/DatanodeState.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.states;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * State Interface that allows tasks to maintain states.
+ */
+public interface DatanodeState<T> {
+  /**
+   * Called before entering this state.
+   */
+  void onEnter();
+
+  /**
+   * Called after exiting this state.
+   */
+  void onExit();
+
+  /**
+   * Executes one or more tasks that are needed by this state.
+   *
+   * @param executor -  ExecutorService
+   */
+  void execute(ExecutorService executor);
+
+  /**
+   * Wait for execute to finish.
+   *
+   * @param time - Time
+   * @param timeUnit - Unit of time.
+   */
+  T await(long time, TimeUnit timeUnit)
+      throws InterruptedException, ExecutionException, TimeoutException;
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
new file mode 100644
index 0000000..f04d392
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.datanode;
+
+import com.google.common.base.Strings;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.common.states.DatanodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.hdds.HddsUtils.getSCMAddresses;
+
+/**
+ * Init Datanode State is the task that gets run when we are in Init State.
+ */
+public class InitDatanodeState implements
+    DatanodeState<DatanodeStateMachine.DatanodeStates>,
+    Callable<DatanodeStateMachine.DatanodeStates> {
+  static final Logger LOG = LoggerFactory.getLogger(InitDatanodeState.class);
+  private final SCMConnectionManager connectionManager;
+  private final Configuration conf;
+  private final StateContext context;
+  private Future<DatanodeStateMachine.DatanodeStates> result;
+
+  /**
+   *  Create InitDatanodeState Task.
+   *
+   * @param conf - Conf
+   * @param connectionManager - Connection Manager
+   * @param context - Current Context
+   */
+  public InitDatanodeState(Configuration conf,
+                           SCMConnectionManager connectionManager,
+                           StateContext context) {
+    this.conf = conf;
+    this.connectionManager = connectionManager;
+    this.context = context;
+  }
+
+  /**
+   * Computes a result, or throws an exception if unable to do so.
+   *
+   * @return computed result
+   * @throws Exception if unable to compute a result
+   */
+  @Override
+  public DatanodeStateMachine.DatanodeStates call() throws Exception {
+    Collection<InetSocketAddress> addresses = null;
+    try {
+      addresses = getSCMAddresses(conf);
+    } catch (IllegalArgumentException e) {
+      if (!Strings.isNullOrEmpty(e.getMessage())) {
+        LOG.error("Failed to get SCM addresses: " + e.getMessage());
+      }
+      return DatanodeStateMachine.DatanodeStates.SHUTDOWN;
+    }
+
+    if (addresses == null || addresses.isEmpty()) {
+      LOG.error("Null or empty SCM address list found.");
+      return DatanodeStateMachine.DatanodeStates.SHUTDOWN;
+    } else {
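+      // Register every configured SCM endpoint with the connection manager.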
+      for (InetSocketAddress addr : addresses) {
+        connectionManager.addSCMServer(addr);
+      }
+    }
+
+    // If datanode ID is set, persist it to the ID file.
+    persistContainerDatanodeDetails();
+
+    return this.context.getState().getNextState();
+  }
+
+  /**
+   * Persist DatanodeDetails to datanode.id file.
+   */
+  private void persistContainerDatanodeDetails() throws IOException {
+    String dataNodeIDPath = HddsUtils.getDatanodeIdFilePath(conf);
+    if (Strings.isNullOrEmpty(dataNodeIDPath)) {
+      LOG.error("A valid file path is needed for config setting {}",
+          ScmConfigKeys.OZONE_SCM_DATANODE_ID);
+      this.context.setState(DatanodeStateMachine.DatanodeStates.SHUTDOWN);
+      return;
+    }
+    File idPath = new File(dataNodeIDPath);
+    DatanodeDetails datanodeDetails = this.context.getParent()
+        .getDatanodeDetails();
+    if (datanodeDetails != null && !idPath.exists()) {
+      ContainerUtils.writeDatanodeDetailsTo(datanodeDetails, idPath);
+      LOG.info("DatanodeDetails is persisted to {}", dataNodeIDPath);
+    }
+  }
+
+  /**
+   * Called before entering this state.
+   */
+  @Override
+  public void onEnter() {
+    LOG.trace("Entering init container state");
+  }
+
+  /**
+   * Called after exiting this state.
+   */
+  @Override
+  public void onExit() {
+    LOG.trace("Exiting init container state");
+  }
+
+  /**
+   * Executes one or more tasks that are needed by this state.
+   *
+   * @param executor -  ExecutorService
+   */
+  @Override
+  public void execute(ExecutorService executor) {
+    result = executor.submit(this);
+  }
+
+  /**
+   * Wait for execute to finish.
+   *
+   * @param time     - Time
+   * @param timeUnit - Unit of time.
+   */
+  @Override
+  public DatanodeStateMachine.DatanodeStates await(long time,
+      TimeUnit timeUnit) throws InterruptedException,
+      ExecutionException, TimeoutException {
+    return result.get(time, timeUnit);
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
new file mode 100644
index 0000000..7a8c17b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.datanode;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.common.states.DatanodeState;
+import org.apache.hadoop.ozone.container.common.states.endpoint.HeartbeatEndpointTask;
+import org.apache.hadoop.ozone.container.common.states.endpoint.RegisterEndpointTask;
+import org.apache.hadoop.ozone.container.common.states.endpoint.VersionEndpointTask;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CompletionService;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Class that implements handshake with SCM.
+ */
+public class RunningDatanodeState implements
+    DatanodeState<DatanodeStateMachine.DatanodeStates> {
+  static final Logger
+      LOG = LoggerFactory.getLogger(RunningDatanodeState.class);
+  private final SCMConnectionManager connectionManager;
+  private final Configuration conf;
+  private final StateContext context;
+  private CompletionService<EndpointStateMachine.EndPointStates> ecs;
+
+  public RunningDatanodeState(Configuration conf,
+      SCMConnectionManager connectionManager,
+      StateContext context) {
+    this.connectionManager = connectionManager;
+    this.conf = conf;
+    this.context = context;
+  }
+
+  /**
+   * Called before entering this state.
+   */
+  @Override
+  public void onEnter() {
+    LOG.trace("Entering handshake task.");
+  }
+
+  /**
+   * Called after exiting this state.
+   */
+  @Override
+  public void onExit() {
+    LOG.trace("Exiting handshake task.");
+  }
+
+  /**
+   * Executes one or more tasks that are needed by this state.
+   *
+   * @param executor -  ExecutorService
+   */
+  @Override
+  public void execute(ExecutorService executor) {
+    ecs = new ExecutorCompletionService<>(executor);
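+    // Submit one endpoint task per SCM; results are drained later in await()
+    // through the completion service.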
+    for (EndpointStateMachine endpoint : connectionManager.getValues()) {
+      Callable<EndpointStateMachine.EndPointStates> endpointTask
+          = getEndPointTask(endpoint);
+      ecs.submit(endpointTask);
+    }
+  }
+  // TODO: Cache some of these tasks instead of creating them all the time.
+  private Callable<EndpointStateMachine.EndPointStates>
+      getEndPointTask(EndpointStateMachine endpoint) {
+    switch (endpoint.getState()) {
+    case GETVERSION:
+      return new VersionEndpointTask(endpoint, conf);
+    case REGISTER:
+      return  RegisterEndpointTask.newBuilder()
+          .setConfig(conf)
+          .setEndpointStateMachine(endpoint)
+          .setDatanodeDetails(context.getParent().getDatanodeDetails())
+          .build();
+    case HEARTBEAT:
+      return HeartbeatEndpointTask.newBuilder()
+          .setConfig(conf)
+          .setEndpointStateMachine(endpoint)
+          .setDatanodeDetails(context.getParent().getDatanodeDetails())
+          .setContext(context)
+          .build();
+    case SHUTDOWN:
+      break;
+    default:
+      throw new IllegalArgumentException("Illegal Argument.");
+    }
+    return null;
+  }
+
+  /**
+   * Computes the next state the container state machine must move to by
+   * looking at the states of all endpoints.
+   * <p>
+   * If any endpoint state has moved to Shutdown, either we have an
+   * unrecoverable error or we have been told to shut down. In either case
+   * the datanode state machine should move to the Shutdown state; otherwise
+   * we remain in the Running state.
+   *
+   * @return next container state.
+   */
+  private DatanodeStateMachine.DatanodeStates
+      computeNextContainerState(
+      List<Future<EndpointStateMachine.EndPointStates>> results) {
+    for (Future<EndpointStateMachine.EndPointStates> state : results) {
+      try {
+        if (state.get() == EndpointStateMachine.EndPointStates.SHUTDOWN) {
+          // if any endpoint tells us to shutdown we move to shutdown state.
+          return DatanodeStateMachine.DatanodeStates.SHUTDOWN;
+        }
+      } catch (InterruptedException | ExecutionException e) {
+        LOG.error("Error in executing end point task.", e);
+      }
+    }
+    return DatanodeStateMachine.DatanodeStates.RUNNING;
+  }
+
+  /**
+   * Wait for execute to finish.
+   *
+   * @param duration - Time
+   * @param timeUnit - Unit of duration.
+   */
+  @Override
+  public DatanodeStateMachine.DatanodeStates
+      await(long duration, TimeUnit timeUnit)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    int count = connectionManager.getValues().size();
+    int returned = 0;
+    long timeLeft = timeUnit.toMillis(duration);
+    long startTime = Time.monotonicNow();
+    List<Future<EndpointStateMachine.EndPointStates>> results = new
+        LinkedList<>();
+
+    while (returned < count && timeLeft > 0) {
+      Future<EndpointStateMachine.EndPointStates> result =
+          ecs.poll(timeLeft, TimeUnit.MILLISECONDS);
+      if (result != null) {
+        results.add(result);
+        returned++;
+      }
+      timeLeft = timeUnit.toMillis(duration)
+          - (Time.monotonicNow() - startTime);
+    }
+    return computeNextContainerState(results);
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java
new file mode 100644
index 0000000..6b8d16c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.datanode;
+/**
+ This package contains files that guide the state transitions from
+ Init->Running->Shutdown for the datanode.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
new file mode 100644
index 0000000..5dee10f
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
@@ -0,0 +1,267 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.states.endpoint;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.ozone.container.common.helpers
+    .DeletedContainerBlocksSummary;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine.EndPointStates;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand;
+import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
+import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.time.ZonedDateTime;
+import java.util.concurrent.Callable;
+
+/**
+ * Heartbeat class for SCMs.
+ */
+public class HeartbeatEndpointTask
+    implements Callable<EndpointStateMachine.EndPointStates> {
+  static final Logger LOG =
+      LoggerFactory.getLogger(HeartbeatEndpointTask.class);
+  private final EndpointStateMachine rpcEndpoint;
+  private final Configuration conf;
+  private DatanodeDetailsProto datanodeDetailsProto;
+  private StateContext context;
+
+  /**
+   * Constructs an SCM heartbeat task.
+   *
+   * @param rpcEndpoint - Endpoint state machine.
+   * @param conf - Config.
+   * @param context - State context.
+   */
+  public HeartbeatEndpointTask(EndpointStateMachine rpcEndpoint,
+      Configuration conf, StateContext context) {
+    this.rpcEndpoint = rpcEndpoint;
+    this.conf = conf;
+    this.context = context;
+  }
+
+  /**
+   * Get the datanode details proto.
+   *
+   * @return DatanodeDetailsProto
+   */
+  public DatanodeDetailsProto getDatanodeDetailsProto() {
+    return datanodeDetailsProto;
+  }
+
+  /**
+   * Set the datanode details proto.
+   *
+   * @param datanodeDetailsProto - DatanodeDetails proto of this datanode.
+   */
+  public void setDatanodeDetailsProto(DatanodeDetailsProto
+      datanodeDetailsProto) {
+    this.datanodeDetailsProto = datanodeDetailsProto;
+  }
+
+  /**
+   * Computes a result, or throws an exception if unable to do so.
+   *
+   * @return computed result
+   * @throws Exception if unable to compute a result
+   */
+  @Override
+  public EndpointStateMachine.EndPointStates call() throws Exception {
+    rpcEndpoint.lock();
+    try {
+      Preconditions.checkState(this.datanodeDetailsProto != null);
+
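+      // Send the heartbeat with the current node report and container report
+      // state, then queue any commands returned by SCM for later processing.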
+      SCMHeartbeatResponseProto response = rpcEndpoint.getEndPoint()
+          .sendHeartbeat(datanodeDetailsProto, this.context.getNodeReport(),
+              this.context.getContainerReportState());
+      processResponse(response, datanodeDetailsProto);
+      rpcEndpoint.setLastSuccessfulHeartbeat(ZonedDateTime.now());
+      rpcEndpoint.zeroMissedCount();
+    } catch (IOException ex) {
+      rpcEndpoint.logIfNeeded(ex);
+    } finally {
+      rpcEndpoint.unlock();
+    }
+    return rpcEndpoint.getState();
+  }
+
+  /**
+   * Returns a builder class for the HeartbeatEndpointTask.
+   * @return Builder.
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Adds the commands in the heartbeat response to the command processing
+   * queue.
+   *
+   * @param response - SCMHeartbeat response.
+   * @param datanodeDetails - Datanode details used to validate the response.
+   */
+  private void processResponse(SCMHeartbeatResponseProto response,
+      final DatanodeDetailsProto datanodeDetails) {
+    for (SCMCommandResponseProto commandResponseProto : response
+        .getCommandsList()) {
+      // Verify the response is indeed for this datanode.
+      Preconditions.checkState(commandResponseProto.getDatanodeUUID()
+          .equalsIgnoreCase(datanodeDetails.getUuid()),
+          "Unexpected datanode ID in the response.");
+      switch (commandResponseProto.getCmdType()) {
+      case sendContainerReport:
+        this.context.addCommand(SendContainerCommand.getFromProtobuf(
+            commandResponseProto.getSendReport()));
+        break;
+      case reregisterCommand:
+        if (rpcEndpoint.getState() == EndPointStates.HEARTBEAT) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Received SCM notification to register."
+                + " Interrupt HEARTBEAT and transit to REGISTER state.");
+          }
+          rpcEndpoint.setState(EndPointStates.REGISTER);
+        } else {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Illegal state {} found, expecting {}.",
+                rpcEndpoint.getState().name(), EndPointStates.HEARTBEAT);
+          }
+        }
+        break;
+      case deleteBlocksCommand:
+        DeleteBlocksCommand db = DeleteBlocksCommand
+            .getFromProtobuf(commandResponseProto.getDeleteBlocksProto());
+        if (!db.blocksTobeDeleted().isEmpty()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug(DeletedContainerBlocksSummary
+                .getFrom(db.blocksTobeDeleted())
+                .toString());
+          }
+          this.context.addCommand(db);
+        }
+        break;
+      case closeContainerCommand:
+        CloseContainerCommand closeContainer =
+            CloseContainerCommand.getFromProtobuf(
+                commandResponseProto.getCloseContainerProto());
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Received SCM container close request for container {}",
+              closeContainer.getContainerName());
+        }
+        this.context.addCommand(closeContainer);
+        break;
+      default:
+        throw new IllegalArgumentException("Unknown response : "
+            + commandResponseProto.getCmdType().name());
+      }
+    }
+  }
+
+  /**
+   * Builder class for HeartbeatEndpointTask.
+   */
+  public static class Builder {
+    private EndpointStateMachine endPointStateMachine;
+    private Configuration conf;
+    private DatanodeDetails datanodeDetails;
+    private StateContext context;
+
+    /**
+     * Constructs the builder class.
+     */
+    public Builder() {
+    }
+
+    /**
+     * Sets the endpoint state machine.
+     *
+     * @param rpcEndPoint - Endpoint state machine.
+     * @return Builder
+     */
+    public Builder setEndpointStateMachine(EndpointStateMachine rpcEndPoint) {
+      this.endPointStateMachine = rpcEndPoint;
+      return this;
+    }
+
+    /**
+     * Sets the Config.
+     *
+     * @param config - config
+     * @return Builder
+     */
+    public Builder setConfig(Configuration config) {
+      this.conf = config;
+      return this;
+    }
+
+    /**
+     * Sets the datanode details.
+     *
+     * @param dnDetails - DatanodeDetails
+     * @return Builder
+     */
+    public Builder setDatanodeDetails(DatanodeDetails dnDetails) {
+      this.datanodeDetails = dnDetails;
+      return this;
+    }
+
+    /**
+     * Sets the context.
+     * @param stateContext - State context.
+     * @return this.
+     */
+    public Builder setContext(StateContext stateContext) {
+      this.context = stateContext;
+      return this;
+    }
+
+    public HeartbeatEndpointTask build() {
+      if (endPointStateMachine == null) {
+        LOG.error("No endpoint specified.");
+        throw new IllegalArgumentException("A valid endpoint state machine is" +
+            " needed to construct HeartbeatEndpointTask task");
+      }
+
+      if (conf == null) {
+        LOG.error("No config specified.");
+        throw new IllegalArgumentException("A valid configration is needed to" +
+            " construct HeartbeatEndpointTask task");
+      }
+
+      if (datanodeDetails == null) {
+        LOG.error("No datanode specified.");
+        throw new IllegalArgumentException("A vaild Node ID is needed to " +
+            "construct HeartbeatEndpointTask task");
+      }
+
+      HeartbeatEndpointTask task = new HeartbeatEndpointTask(this
+          .endPointStateMachine, this.conf, this.context);
+      task.setDatanodeDetailsProto(datanodeDetails.getProtoBufMessage());
+      return task;
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java
new file mode 100644
index 0000000..ca3bef0
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/RegisterEndpointTask.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.endpoint;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
+
+/**
+ * Register the datanode with SCM.
+ */
+public final class RegisterEndpointTask implements
+    Callable<EndpointStateMachine.EndPointStates> {
+  static final Logger LOG = LoggerFactory.getLogger(RegisterEndpointTask.class);
+
+  private final EndpointStateMachine rpcEndPoint;
+  private final Configuration conf;
+  private Future<EndpointStateMachine.EndPointStates> result;
+  private DatanodeDetails datanodeDetails;
+
+  /**
+   * Creates a register endpoint task.
+   *
+   * @param rpcEndPoint - endpoint
+   * @param conf - conf
+   */
+  @VisibleForTesting
+  public RegisterEndpointTask(EndpointStateMachine rpcEndPoint,
+      Configuration conf) {
+    this.rpcEndPoint = rpcEndPoint;
+    this.conf = conf;
+
+  }
+
+  /**
+   * Get the DatanodeDetails.
+   *
+   * @return DatanodeDetails
+   */
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
+  /**
+   * Set the DatanodeDetails.
+   *
+   * @param datanodeDetails - DatanodeDetails of this datanode.
+   */
+  public void setDatanodeDetails(
+      DatanodeDetails datanodeDetails) {
+    this.datanodeDetails = datanodeDetails;
+  }
+
+  /**
+   * Computes a result, or throws an exception if unable to do so.
+   *
+   * @return computed result
+   * @throws Exception if unable to compute a result
+   */
+  @Override
+  public EndpointStateMachine.EndPointStates call() throws Exception {
+
+    if (getDatanodeDetails() == null) {
+      LOG.error("DatanodeDetails cannot be null in RegisterEndpoint task, " +
+          "shutting down the endpoint.");
+      return rpcEndPoint.setState(EndpointStateMachine.EndPointStates.SHUTDOWN);
+    }
+
+    rpcEndPoint.lock();
+    try {
+
+      // TODO : Add responses to the command Queue.
+      SCMRegisteredCmdResponseProto response = rpcEndPoint.getEndPoint()
+          .register(datanodeDetails.getProtoBufMessage(),
+              conf.getStrings(ScmConfigKeys.OZONE_SCM_NAMES));
+      Preconditions.checkState(UUID.fromString(response.getDatanodeUUID())
+              .equals(datanodeDetails.getUuid()),
+          "Unexpected datanode ID in the response.");
+      Preconditions.checkState(!StringUtils.isBlank(response.getClusterID()),
+          "Invalid cluster ID in the response.");
+      if (response.hasHostname() && response.hasIpAddress()) {
+        datanodeDetails.setHostName(response.getHostname());
+        datanodeDetails.setIpAddress(response.getIpAddress());
+      }
+      EndpointStateMachine.EndPointStates nextState =
+          rpcEndPoint.getState().getNextState();
+      rpcEndPoint.setState(nextState);
+      rpcEndPoint.zeroMissedCount();
+    } catch (IOException ex) {
+      rpcEndPoint.logIfNeeded(ex);
+    } finally {
+      rpcEndPoint.unlock();
+    }
+
+    return rpcEndPoint.getState();
+  }
+
+  /**
+   * Returns a builder class for RegisterEndPoint task.
+   *
+   * @return Builder.
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder class for RegisterEndPoint task.
+   */
+  public static class Builder {
+    private EndpointStateMachine endPointStateMachine;
+    private Configuration conf;
+    private DatanodeDetails datanodeDetails;
+
+    /**
+     * Constructs the builder class.
+     */
+    public Builder() {
+    }
+
+    /**
+     * Sets the endpoint state machine.
+     *
+     * @param rpcEndPoint - Endpoint state machine.
+     * @return Builder
+     */
+    public Builder setEndpointStateMachine(EndpointStateMachine rpcEndPoint) {
+      this.endPointStateMachine = rpcEndPoint;
+      return this;
+    }
+
+    /**
+     * Sets the Config.
+     *
+     * @param config - config
+     * @return Builder.
+     */
+    public Builder setConfig(Configuration config) {
+      this.conf = config;
+      return this;
+    }
+
+    /**
+     * Sets the datanode details.
+     *
+     * @param dnDetails - DatanodeDetails
+     * @return Builder
+     */
+    public Builder setDatanodeDetails(DatanodeDetails dnDetails) {
+      this.datanodeDetails = dnDetails;
+      return this;
+    }
+
+    public RegisterEndpointTask build() {
+      if (endPointStateMachine == null) {
+        LOG.error("No endpoint specified.");
+        throw new IllegalArgumentException("A valid endpoint state machine is" +
+            " needed to construct RegisterEndPoint task");
+      }
+
+      if (conf == null) {
+        LOG.error("No config specified.");
+        throw new IllegalArgumentException("A valid configration is needed to" +
+            " construct RegisterEndpoint task");
+      }
+
+      if (datanodeDetails == null) {
+        LOG.error("No datanode specified.");
+        throw new IllegalArgumentException("A vaild Node ID is needed to " +
+            "construct RegisterEndpoint task");
+      }
+
+      RegisterEndpointTask task = new RegisterEndpointTask(this
+          .endPointStateMachine, this.conf);
+      task.setDatanodeDetails(datanodeDetails);
+      return task;
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
new file mode 100644
index 0000000..b048ee5
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.endpoint;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+
+import java.io.IOException;
+import java.util.concurrent.Callable;
+
+/**
+ * Task that fetches the SCM version and advances the endpoint state.
+ */
+public class VersionEndpointTask implements
+    Callable<EndpointStateMachine.EndPointStates> {
+  private final EndpointStateMachine rpcEndPoint;
+  private final Configuration configuration;
+
+  public VersionEndpointTask(EndpointStateMachine rpcEndPoint,
+      Configuration conf) {
+    this.rpcEndPoint = rpcEndPoint;
+    this.configuration = conf;
+  }
+
+  /**
+   * Computes a result, or throws an exception if unable to do so.
+   *
+   * @return computed result
+   * @throws Exception if unable to compute a result
+   */
+  @Override
+  public EndpointStateMachine.EndPointStates call() throws Exception {
+    rpcEndPoint.lock();
+    try {
+      SCMVersionResponseProto versionResponse =
+          rpcEndPoint.getEndPoint().getVersion(null);
+      rpcEndPoint.setVersion(VersionResponse.getFromProtobuf(versionResponse));
+
+      EndpointStateMachine.EndPointStates nextState =
+          rpcEndPoint.getState().getNextState();
+      rpcEndPoint.setState(nextState);
+      rpcEndPoint.zeroMissedCount();
+    } catch (IOException ex) {
+      rpcEndPoint.logIfNeeded(ex);
+    } finally {
+      rpcEndPoint.unlock();
+    }
+    return rpcEndPoint.getState();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java
new file mode 100644
index 0000000..1122598
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/package-info.java
@@ -0,0 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common.states.endpoint;
+/**
+ This package contains code for RPC endpoint state transitions.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java
new file mode 100644
index 0000000..92c953f
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.states;
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java
new file mode 100644
index 0000000..50e45b4
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServer.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server;
+
+import com.google.common.base.Preconditions;
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.Channel;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.handler.logging.LogLevel;
+import io.netty.handler.logging.LoggingHandler;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.SocketAddress;
+
+/**
+ * Creates a netty server endpoint that acts as the communication layer for
+ * Ozone containers.
+ */
+public final class XceiverServer implements XceiverServerSpi {
+  private static final Logger
+      LOG = LoggerFactory.getLogger(XceiverServer.class);
+  private int port;
+  private final ContainerDispatcher storageContainer;
+
+  private EventLoopGroup bossGroup;
+  private EventLoopGroup workerGroup;
+  private Channel channel;
+
+  /**
+   * Constructs a netty server class.
+   *
+   * @param datanodeDetails - DatanodeDetails of this datanode.
+   * @param conf - Configuration
+   * @param dispatcher - ContainerDispatcher that handles container requests.
+   */
+  public XceiverServer(DatanodeDetails datanodeDetails, Configuration conf,
+                       ContainerDispatcher dispatcher) {
+    Preconditions.checkNotNull(conf);
+
+    this.port = conf.getInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+        OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT);
+    // Get an available port on current node and
+    // use that as the container port
+    if (conf.getBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT,
+        OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT_DEFAULT)) {
+      try (ServerSocket socket = new ServerSocket()) {
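+        // Binding to port 0 asks the OS for a free ephemeral port. Note that
+        // the probe socket is closed before the netty server binds, so there
+        // is a short window in which another process could grab the port
+        // (presumably acceptable, since random ports are mainly used for
+        // test setups such as MiniOzoneCluster).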
+        socket.setReuseAddress(true);
+        SocketAddress address = new InetSocketAddress(0);
+        socket.bind(address);
+        this.port = socket.getLocalPort();
+        LOG.info("Found a free port for the server : {}", this.port);
+      } catch (IOException e) {
+        LOG.error("Unable find a random free port for the server, "
+            + "fallback to use default port {}", this.port, e);
+      }
+    }
+    datanodeDetails.setContainerPort(port);
+    this.storageContainer = dispatcher;
+  }
+
+  @Override
+  public int getIPCPort() {
+    return this.port;
+  }
+
+  /**
+   * Returns the Replication type supported by this end-point.
+   *
+   * @return enum -- {Stand_Alone, Ratis, Chained}
+   */
+  @Override
+  public HddsProtos.ReplicationType getServerType() {
+    return HddsProtos.ReplicationType.STAND_ALONE;
+  }
+
+  @Override
+  public void start() throws IOException {
+    bossGroup = new NioEventLoopGroup();
+    workerGroup = new NioEventLoopGroup();
+    channel = new ServerBootstrap()
+        .group(bossGroup, workerGroup)
+        .channel(NioServerSocketChannel.class)
+        .handler(new LoggingHandler(LogLevel.INFO))
+        .childHandler(new XceiverServerInitializer(storageContainer))
+        .bind(port)
+        .syncUninterruptibly()
+        .channel();
+  }
+
+  @Override
+  public void stop() {
+    if (storageContainer != null) {
+      storageContainer.shutdown();
+    }
+    if (bossGroup != null) {
+      bossGroup.shutdownGracefully();
+    }
+    if (workerGroup != null) {
+      workerGroup.shutdownGracefully();
+    }
+    if (channel != null) {
+      channel.close().awaitUninterruptibly();
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java
new file mode 100644
index 0000000..5947dde
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerHandler.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Netty server handlers that respond to Network events.
+ */
+public class XceiverServerHandler extends
+    SimpleChannelInboundHandler<ContainerCommandRequestProto> {
+
+  static final Logger LOG = LoggerFactory.getLogger(XceiverServerHandler.class);
+  private final ContainerDispatcher dispatcher;
+
+  /**
+   * Constructor for server handler.
+   * @param dispatcher - Dispatcher interface
+   */
+  public XceiverServerHandler(ContainerDispatcher dispatcher) {
+    this.dispatcher = dispatcher;
+  }
+
+  /**
+   * <strong>Please keep in mind that this method will be renamed to {@code
+   * messageReceived(ChannelHandlerContext, I)} in 5.0.</strong>
+   * <p>
+   * Is called for each message of type {@link ContainerCommandRequestProto}.
+   *
+   * @param ctx the {@link ChannelHandlerContext} which this {@link
+   *            SimpleChannelInboundHandler} belongs to
+   * @param msg the message to handle
+   * @throws Exception is thrown if an error occurred
+   */
+  @Override
+  public void channelRead0(ChannelHandlerContext ctx,
+                           ContainerCommandRequestProto msg) throws
+      Exception {
+    ContainerCommandResponseProto response = this.dispatcher.dispatch(msg);
+    LOG.debug("Writing the reponse back to client.");
+    ctx.writeAndFlush(response);
+
+  }
+
+  /**
+   * Calls {@link ChannelHandlerContext#fireExceptionCaught(Throwable)}
+   * Sub-classes may override this method to change behavior.
+   *
+   * @param ctx   - Channel Handler Context
+   * @param cause - Exception
+   */
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
+      throws Exception {
+    LOG.error("An exception caught in the pipeline : " + cause.toString());
+    super.exceptionCaught(ctx, cause);
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java
new file mode 100644
index 0000000..78ba26b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerInitializer.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server;
+
+import com.google.common.base.Preconditions;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.handler.codec.protobuf.ProtobufDecoder;
+import io.netty.handler.codec.protobuf.ProtobufEncoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
+import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+
+/**
+ * Creates a channel for the XceiverServer.
+ */
+public class XceiverServerInitializer extends
+    ChannelInitializer<SocketChannel> {
+  private final ContainerDispatcher dispatcher;
+
+  public XceiverServerInitializer(ContainerDispatcher dispatcher) {
+    Preconditions.checkNotNull(dispatcher);
+    this.dispatcher = dispatcher;
+  }
+
+  /**
+   * This method will be called once the {@link SocketChannel} is registered.
+   * After the method returns, this instance will be removed from the
+   * {@link ChannelPipeline}.
+   *
+   * @param ch the {@link SocketChannel} which was registered.
+   * @throws Exception is thrown if an error occurs. In that case the channel
+   * will be closed.
+   */
+  @Override
+  protected void initChannel(SocketChannel ch) throws Exception {
+    ChannelPipeline pipeline = ch.pipeline();
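+    // Inbound: split the stream into varint32 length-prefixed frames and
+    // decode each frame into a ContainerCommandRequestProto.
+    // Outbound: prepend the varint32 length and encode the response proto.
+    // The XceiverServerHandler at the tail dispatches decoded requests to
+    // the ContainerDispatcher.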
+    pipeline.addLast(new ProtobufVarint32FrameDecoder());
+    pipeline.addLast(new ProtobufDecoder(ContainerCommandRequestProto
+        .getDefaultInstance()));
+    pipeline.addLast(new ProtobufVarint32LengthFieldPrepender());
+    pipeline.addLast(new ProtobufEncoder());
+    pipeline.addLast(new XceiverServerHandler(dispatcher));
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java
new file mode 100644
index 0000000..dad9e9f
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerSpi.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server;
+
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+
+/** A server endpoint that acts as the communication layer for Ozone
+ * containers. */
+public interface XceiverServerSpi {
+  /** Starts the server. */
+  void start() throws IOException;
+
+  /** Stops a running server. */
+  void stop();
+
+  /** Get server IPC port. */
+  int getIPCPort();
+
+  /**
+   * Returns the Replication type supported by this end-point.
+   * @return enum -- {Stand_Alone, Ratis, Chained}
+   */
+  HddsProtos.ReplicationType getServerType();
+
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java
new file mode 100644
index 0000000..59c96f1
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server;
+
+/**
+ * This package contains classes for the server of the storage container
+ * protocol.
+ */
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
new file mode 100644
index 0000000..1a89e44b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
@@ -0,0 +1,293 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server.ratis;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .WriteChunkRequestProto;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.ratis.conf.RaftProperties;
+import org.apache.ratis.protocol.Message;
+import org.apache.ratis.protocol.RaftClientRequest;
+import org.apache.ratis.protocol.RaftPeerId;
+import org.apache.ratis.server.storage.RaftStorage;
+import org.apache.ratis.shaded.com.google.protobuf.ByteString;
+import org.apache.ratis.shaded.com.google.protobuf.ShadedProtoUtil;
+import org.apache.ratis.shaded.proto.RaftProtos.LogEntryProto;
+import org.apache.ratis.shaded.proto.RaftProtos.SMLogEntryProto;
+import org.apache.ratis.statemachine.StateMachineStorage;
+import org.apache.ratis.statemachine.TransactionContext;
+import org.apache.ratis.statemachine.impl.BaseStateMachine;
+import org.apache.ratis.statemachine.impl.SimpleStateMachineStorage;
+import org.apache.ratis.statemachine.impl.TransactionContextImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ThreadPoolExecutor;
+
+/** A {@link org.apache.ratis.statemachine.StateMachine} for containers.
+ *
+ * The state machine is responsible for handling the different types of
+ * container requests. Container requests can be divided into read-only and
+ * write requests.
+ *
+ * Read-only requests are classified by
+ * {@link org.apache.hadoop.hdds.scm.XceiverClientRatis#isReadOnly}
+ * and are answered directly from {@link #query(Message)}.
+ *
+ * Write requests can be divided into requests that carry user data
+ * (WriteChunkRequest) and requests without user data.
+ *
+ * In order to optimize write throughput, a writeChunk request is processed
+ * in two phases. The split happens in
+ * {@link #startTransaction(RaftClientRequest)}: in the first phase the user
+ * data is written into the state machine via {@link #writeStateMachineData},
+ * and in the second phase the transaction is committed via
+ * {@link #applyTransaction(TransactionContext)}.
+ *
+ * Requests without state machine data are committed directly through
+ * {@link #applyTransaction(TransactionContext)}.
+ *
+ * Two ordering constraints are currently enforced in the code:
+ *
+ * 1) Write chunk operations are executed after the create container
+ * operation; otherwise the write chunk operation would fail because the
+ * container has not been created yet. Hence the create container operation
+ * is also split in {@link #startTransaction(RaftClientRequest)}, which helps
+ * synchronize the calls in {@link #writeStateMachineData}.
+ *
+ * 2) The write chunk commit operation is executed after the write chunk
+ * state machine operation, which ensures that the commit is ordered after
+ * the state machine write.
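+ *
+ * A rough sketch of the WriteChunk flow:
+ *   startTransaction()      - split the request into a WRITE_DATA request
+ *                             (kept as state machine data) and a COMMIT_DATA
+ *                             request (kept as log data)
+ *   writeStateMachineData() - dispatch the WRITE_DATA request asynchronously
+ *                             on the write-chunk executor
+ *   applyTransaction()      - once the log entry is committed, dispatch the
+ *                             COMMIT_DATA request after the data write has
+ *                             completed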
+ */
+public class ContainerStateMachine extends BaseStateMachine {
+  static final Logger LOG = LoggerFactory.getLogger(
+      ContainerStateMachine.class);
+  private final SimpleStateMachineStorage storage
+      = new SimpleStateMachineStorage();
+  private final ContainerDispatcher dispatcher;
+  private ThreadPoolExecutor writeChunkExecutor;
+  private final ConcurrentHashMap<Long, CompletableFuture<Message>>
+      writeChunkFutureMap;
+  private final ConcurrentHashMap<String, CompletableFuture<Message>>
+      createContainerFutureMap;
+
+  ContainerStateMachine(ContainerDispatcher dispatcher,
+      ThreadPoolExecutor writeChunkExecutor) {
+    this.dispatcher = dispatcher;
+    this.writeChunkExecutor = writeChunkExecutor;
+    this.writeChunkFutureMap = new ConcurrentHashMap<>();
+    this.createContainerFutureMap = new ConcurrentHashMap<>();
+  }
+
+  @Override
+  public StateMachineStorage getStateMachineStorage() {
+    return storage;
+  }
+
+  @Override
+  public void initialize(
+      RaftPeerId id, RaftProperties properties, RaftStorage raftStorage)
+      throws IOException {
+    super.initialize(id, properties, raftStorage);
+    storage.init(raftStorage);
+    //  TODO handle snapshots
+
+    // TODO: Add a flag that tells you that initialize has been called.
+    // Check with Ratis if this feature is done in Ratis.
+  }
+
+  @Override
+  public TransactionContext startTransaction(RaftClientRequest request)
+      throws IOException {
+    final ContainerCommandRequestProto proto =
+        getRequestProto(request.getMessage().getContent());
+
+    final SMLogEntryProto log;
+    if (proto.getCmdType() == ContainerProtos.Type.WriteChunk) {
+      final WriteChunkRequestProto write = proto.getWriteChunk();
+      // create the state machine data proto
+      final WriteChunkRequestProto dataWriteChunkProto =
+          WriteChunkRequestProto
+              .newBuilder(write)
+              .setStage(ContainerProtos.Stage.WRITE_DATA)
+              .build();
+      ContainerCommandRequestProto dataContainerCommandProto =
+          ContainerCommandRequestProto
+              .newBuilder(proto)
+              .setWriteChunk(dataWriteChunkProto)
+              .build();
+
+      // create the log entry proto
+      final WriteChunkRequestProto commitWriteChunkProto =
+          WriteChunkRequestProto.newBuilder()
+              .setPipeline(write.getPipeline())
+              .setKeyName(write.getKeyName())
+              .setChunkData(write.getChunkData())
+              // skipping the data field as it is
+              // already set in statemachine data proto
+              .setStage(ContainerProtos.Stage.COMMIT_DATA)
+              .build();
+      ContainerCommandRequestProto commitContainerCommandProto =
+          ContainerCommandRequestProto
+              .newBuilder(proto)
+              .setWriteChunk(commitWriteChunkProto)
+              .build();
+
+      log = SMLogEntryProto.newBuilder()
+          .setData(getShadedByteString(commitContainerCommandProto))
+          .setStateMachineData(getShadedByteString(dataContainerCommandProto))
+          .build();
+    } else if (proto.getCmdType() == ContainerProtos.Type.CreateContainer) {
+      log = SMLogEntryProto.newBuilder()
+          .setData(request.getMessage().getContent())
+          .setStateMachineData(request.getMessage().getContent())
+          .build();
+    } else {
+      log = SMLogEntryProto.newBuilder()
+          .setData(request.getMessage().getContent())
+          .build();
+    }
+    return new TransactionContextImpl(this, request, log);
+  }
+
+  private ByteString getShadedByteString(ContainerCommandRequestProto proto) {
+    return ShadedProtoUtil.asShadedByteString(proto.toByteArray());
+  }
+
+  private ContainerCommandRequestProto getRequestProto(ByteString request)
+      throws InvalidProtocolBufferException {
+    return ContainerCommandRequestProto.parseFrom(
+        ShadedProtoUtil.asByteString(request));
+  }
+
+  private Message runCommand(ContainerCommandRequestProto requestProto) {
+    LOG.trace("dispatch {}", requestProto);
+    ContainerCommandResponseProto response = dispatcher.dispatch(requestProto);
+    LOG.trace("response {}", response);
+    return () -> ShadedProtoUtil.asShadedByteString(response.toByteArray());
+  }
+
+  private CompletableFuture<Message> handleWriteChunk(
+      ContainerCommandRequestProto requestProto, long entryIndex) {
+    final WriteChunkRequestProto write = requestProto.getWriteChunk();
+    String containerName = write.getPipeline().getContainerName();
+    CompletableFuture<Message> future =
+        createContainerFutureMap.get(containerName);
+    CompletableFuture<Message> writeChunkFuture;
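+    // If a CreateContainer for this container is still pending, chain the
+    // write after its future so the chunk is never written before the
+    // container exists; otherwise submit the write directly to the
+    // write-chunk executor.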
+    if (future != null) {
+      writeChunkFuture = future.thenApplyAsync(
+          v -> runCommand(requestProto), writeChunkExecutor);
+    } else {
+      writeChunkFuture = CompletableFuture.supplyAsync(
+          () -> runCommand(requestProto), writeChunkExecutor);
+    }
+    writeChunkFutureMap.put(entryIndex, writeChunkFuture);
+    return writeChunkFuture;
+  }
+
+  private CompletableFuture<Message> handleCreateContainer(
+      ContainerCommandRequestProto requestProto) {
+    String containerName =
+        requestProto.getCreateContainer().getContainerData().getName();
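+    // Register a future for this container; pending WriteChunk requests for
+    // the same container chain on it (see handleWriteChunk), and it is
+    // completed in applyTransaction once the container has been created.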
+    createContainerFutureMap
+        .computeIfAbsent(containerName, k -> new CompletableFuture<>());
+    return CompletableFuture.completedFuture(() -> ByteString.EMPTY);
+  }
+
+  @Override
+  public CompletableFuture<Message> writeStateMachineData(LogEntryProto entry) {
+    try {
+      final ContainerCommandRequestProto requestProto =
+          getRequestProto(entry.getSmLogEntry().getStateMachineData());
+      ContainerProtos.Type cmdType = requestProto.getCmdType();
+      switch (cmdType) {
+      case CreateContainer:
+        return handleCreateContainer(requestProto);
+      case WriteChunk:
+        return handleWriteChunk(requestProto, entry.getIndex());
+      default:
+        throw new IllegalStateException("Cmd Type:" + cmdType
+            + " should not have state machine data");
+      }
+    } catch (IOException e) {
+      return completeExceptionally(e);
+    }
+  }
+
+  @Override
+  public CompletableFuture<Message> query(Message request) {
+    try {
+      final ContainerCommandRequestProto requestProto =
+          getRequestProto(request.getContent());
+      return CompletableFuture.completedFuture(runCommand(requestProto));
+    } catch (IOException e) {
+      return completeExceptionally(e);
+    }
+  }
+
+  @Override
+  public CompletableFuture<Message> applyTransaction(TransactionContext trx) {
+    try {
+      ContainerCommandRequestProto requestProto =
+          getRequestProto(trx.getSMLogEntry().getData());
+      ContainerProtos.Type cmdType = requestProto.getCmdType();
+
+      if (cmdType == ContainerProtos.Type.WriteChunk) {
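+        // The WRITE_DATA phase was already submitted in
+        // writeStateMachineData; chain the COMMIT_DATA dispatch after that
+        // future (keyed by log index) so the commit is ordered after the
+        // data write.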
+        WriteChunkRequestProto write = requestProto.getWriteChunk();
+        // the data field has already been removed in startTransaction
+        Preconditions.checkArgument(!write.hasData());
+        CompletableFuture<Message> stateMachineFuture =
+            writeChunkFutureMap.remove(trx.getLogEntry().getIndex());
+        return stateMachineFuture
+            .thenComposeAsync(v ->
+                CompletableFuture.completedFuture(runCommand(requestProto)));
+      } else {
+        Message message = runCommand(requestProto);
+        if (cmdType == ContainerProtos.Type.CreateContainer) {
+          String containerName =
+              requestProto.getCreateContainer().getContainerData().getName();
+          createContainerFutureMap.remove(containerName).complete(message);
+        }
+        return CompletableFuture.completedFuture(message);
+      }
+    } catch (IOException e) {
+      return completeExceptionally(e);
+    }
+  }
+
+  private static <T> CompletableFuture<T> completeExceptionally(Exception e) {
+    final CompletableFuture<T> future = new CompletableFuture<>();
+    future.completeExceptionally(e);
+    return future;
+  }
+
+  @Override
+  public void close() throws IOException {
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
new file mode 100644
index 0000000..4bd55f1
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.transport.server.ratis;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.transport.server
+    .XceiverServerSpi;
+import org.apache.ratis.RaftConfigKeys;
+import org.apache.ratis.RatisHelper;
+import org.apache.ratis.conf.RaftProperties;
+import org.apache.ratis.grpc.GrpcConfigKeys;
+import org.apache.ratis.netty.NettyConfigKeys;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.rpc.SupportedRpcType;
+import org.apache.ratis.server.RaftServer;
+import org.apache.ratis.server.RaftServerConfigKeys;
+import org.apache.ratis.util.SizeInBytes;
+import org.apache.ratis.util.TimeDuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.SocketAddress;
+import java.util.Objects;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Creates a ratis server endpoint that acts as the communication layer for
+ * Ozone containers.
+ */
+public final class XceiverServerRatis implements XceiverServerSpi {
+  static final Logger LOG = LoggerFactory.getLogger(XceiverServerRatis.class);
+  private final int port;
+  private final RaftServer server;
+  private ThreadPoolExecutor writeChunkExecutor;
+
+  private XceiverServerRatis(DatanodeDetails dd, int port, String storageDir,
+      ContainerDispatcher dispatcher, Configuration conf) throws IOException {
+
+    final String rpcType = conf.get(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT);
+    final RpcType rpc = SupportedRpcType.valueOfIgnoreCase(rpcType);
+    final int raftSegmentSize = conf.getInt(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_SIZE_KEY,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_SIZE_DEFAULT);
+    final int raftSegmentPreallocatedSize = conf.getInt(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_KEY,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_SEGMENT_PREALLOCATED_SIZE_DEFAULT);
+    final int maxChunkSize = OzoneConfigKeys.DFS_CONTAINER_CHUNK_MAX_SIZE;
+    final int numWriteChunkThreads = conf.getInt(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_KEY,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_NUM_WRITE_CHUNK_THREADS_DEFAULT);
+
+    Objects.requireNonNull(dd, "id == null");
+    this.port = port;
+    RaftProperties serverProperties = newRaftProperties(rpc, port,
+        storageDir, maxChunkSize, raftSegmentSize, raftSegmentPreallocatedSize);
+
+    writeChunkExecutor =
+        new ThreadPoolExecutor(numWriteChunkThreads, numWriteChunkThreads,
+            100, TimeUnit.SECONDS,
+            new ArrayBlockingQueue<>(1024),
+            new ThreadPoolExecutor.CallerRunsPolicy());
+    ContainerStateMachine stateMachine =
+        new ContainerStateMachine(dispatcher, writeChunkExecutor);
+    this.server = RaftServer.newBuilder()
+        .setServerId(RatisHelper.toRaftPeerId(dd))
+        .setGroup(RatisHelper.emptyRaftGroup())
+        .setProperties(serverProperties)
+        .setStateMachine(stateMachine)
+        .build();
+  }
+
+  private static RaftProperties newRaftProperties(
+      RpcType rpc, int port, String storageDir, int scmChunkSize,
+      int raftSegmentSize, int raftSegmentPreallocatedSize) {
+    final RaftProperties properties = new RaftProperties();
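+    // A rough summary of the tuning below (assuming standard Ratis
+    // semantics): batch log appends, size the write buffer to the maximum
+    // chunk size, cap raft log segments at the configured segment size with
+    // the configured preallocation, and allow gRPC messages large enough to
+    // carry a full chunk plus the preallocation overhead.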
+    RaftServerConfigKeys.Log.Appender.setBatchEnabled(properties, true);
+    RaftServerConfigKeys.Log.Appender.setBufferCapacity(properties,
+        SizeInBytes.valueOf(raftSegmentPreallocatedSize));
+    RaftServerConfigKeys.Log.setWriteBufferSize(properties,
+        SizeInBytes.valueOf(scmChunkSize));
+    RaftServerConfigKeys.Log.setPreallocatedSize(properties,
+        SizeInBytes.valueOf(raftSegmentPreallocatedSize));
+    RaftServerConfigKeys.Log.setSegmentSizeMax(properties,
+        SizeInBytes.valueOf(raftSegmentSize));
+    RaftServerConfigKeys.setStorageDir(properties, new File(storageDir));
+    RaftConfigKeys.Rpc.setType(properties, rpc);
+
+    RaftServerConfigKeys.Log.setMaxCachedSegmentNum(properties, 2);
+    GrpcConfigKeys.setMessageSizeMax(properties,
+        SizeInBytes.valueOf(scmChunkSize + raftSegmentPreallocatedSize));
+    RaftServerConfigKeys.Rpc.setTimeoutMin(properties,
+        TimeDuration.valueOf(800, TimeUnit.MILLISECONDS));
+    RaftServerConfigKeys.Rpc.setTimeoutMax(properties,
+        TimeDuration.valueOf(1000, TimeUnit.MILLISECONDS));
+    if (rpc == SupportedRpcType.GRPC) {
+      GrpcConfigKeys.Server.setPort(properties, port);
+    } else if (rpc == SupportedRpcType.NETTY) {
+      NettyConfigKeys.Server.setPort(properties, port);
+    }
+    return properties;
+  }
+
+  public static XceiverServerRatis newXceiverServerRatis(
+      DatanodeDetails datanodeDetails, Configuration ozoneConf,
+      ContainerDispatcher dispatcher) throws IOException {
+    final String ratisDir = File.separator + "ratis";
+    int localPort = ozoneConf.getInt(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_PORT,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_PORT_DEFAULT);
+    String storageDir = ozoneConf.get(
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR);
+
+    if (Strings.isNullOrEmpty(storageDir)) {
+      storageDir = ozoneConf.get(OzoneConfigKeys
+          .OZONE_METADATA_DIRS);
+      Preconditions.checkNotNull(storageDir, "ozone.metadata.dirs " +
+          "cannot be null. Please check your configs.");
+      storageDir = storageDir.concat(ratisDir);
+      LOG.warn("Storage directory for Ratis is not configured. Mapping Ratis " +
+              "storage under {}. It is a good idea to map this to an SSD disk.",
+          storageDir);
+    }
+
+    // Get an available port on current node and
+    // use that as the container port
+    if (ozoneConf.getBoolean(OzoneConfigKeys
+            .DFS_CONTAINER_RATIS_IPC_RANDOM_PORT,
+        OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT_DEFAULT)) {
+      try (ServerSocket socket = new ServerSocket()) {
+        socket.setReuseAddress(true);
+        SocketAddress address = new InetSocketAddress(0);
+        socket.bind(address);
+        localPort = socket.getLocalPort();
+        LOG.info("Found a free port for the server : {}", localPort);
+        // If we have random local ports configured, this means that it is
+        // probably running under MiniOzoneCluster. Ratis locks the storage
+        // directories, so we need to pass a different local directory for
+        // each local instance. So we map Ratis directories under the
+        // datanode ID.
+        storageDir =
+            storageDir.concat(File.separator +
+                datanodeDetails.getUuidString());
+      } catch (IOException e) {
+        LOG.error("Unable find a random free port for the server, "
+            + "fallback to use default port {}", localPort, e);
+      }
+    }
+    datanodeDetails.setRatisPort(localPort);
+    return new XceiverServerRatis(datanodeDetails, localPort, storageDir,
+        dispatcher, ozoneConf);
+  }
+
+  @Override
+  public void start() throws IOException {
+    LOG.info("Starting {} {} at port {}", getClass().getSimpleName(),
+        server.getId(), getIPCPort());
+    writeChunkExecutor.prestartAllCoreThreads();
+    server.start();
+  }
+
+  @Override
+  public void stop() {
+    try {
+      writeChunkExecutor.shutdown();
+      server.close();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public int getIPCPort() {
+    return port;
+  }
+
+  /**
+   * Returns the Replication type supported by this end-point.
+   *
+   * @return enum -- {Stand_Alone, Ratis, Chained}
+   */
+  @Override
+  public HddsProtos.ReplicationType getServerType() {
+    return HddsProtos.ReplicationType.RATIS;
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java
new file mode 100644
index 0000000..8debfe0
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.transport.server.ratis;
+
+/**
+ * This package contains classes for the server implementation
+ * using Apache Ratis.
+ */
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java
new file mode 100644
index 0000000..6ae45b6
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.utils;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.collections.MapIterator;
+import org.apache.commons.collections.map.LRUMap;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * ContainerCache is an LRUMap that maintains the DB handles.
+ */
+public final class ContainerCache extends LRUMap {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerCache.class);
+  private final Lock lock = new ReentrantLock();
+  private static ContainerCache cache;
+  private static final float LOAD_FACTOR = 0.75f;
+  /**
+   * Constructs a cache that holds DBHandle references.
+   */
+  private ContainerCache(int maxSize, float loadFactor, boolean
+      scanUntilRemovable) {
+    super(maxSize, loadFactor, scanUntilRemovable);
+  }
+
+  /**
+   * Return a singleton instance of {@link ContainerCache}
+   * that holds the DB handlers.
+   *
+   * @param conf - Configuration.
+   * @return An instance of {@link ContainerCache}.
+   */
+  public static synchronized ContainerCache getInstance(Configuration conf) {
+    if (cache == null) {
+      int cacheSize = conf.getInt(OzoneConfigKeys.OZONE_CONTAINER_CACHE_SIZE,
+          OzoneConfigKeys.OZONE_CONTAINER_CACHE_DEFAULT);
+      cache = new ContainerCache(cacheSize, LOAD_FACTOR, true);
+    }
+    return cache;
+  }
+
+  /**
+   * Closes a db instance.
+   *
+   * @param container - name of the container to be closed.
+   * @param db - db instance to close.
+   */
+  private void closeDB(String container, MetadataStore db) {
+    if (db != null) {
+      try {
+        db.close();
+      } catch (IOException e) {
+        LOG.error("Error closing DB. Container: " + container, e);
+      }
+    }
+  }
+
+  /**
+   * Closes all the db instances and resets the cache.
+   */
+  public void shutdownCache() {
+    lock.lock();
+    try {
+      // iterate the cache and close each db
+      MapIterator iterator = cache.mapIterator();
+      while (iterator.hasNext()) {
+        iterator.next();
+        MetadataStore db = (MetadataStore) iterator.getValue();
+        closeDB(iterator.getKey().toString(), db);
+      }
+      // reset the cache
+      cache.clear();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected boolean removeLRU(LinkEntry entry) {
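+    // Called by LRUMap when it is about to evict the eldest entry; close the
+    // underlying DB handle first, then return true to let the eviction
+    // proceed.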
+    lock.lock();
+    try {
+      MetadataStore db = (MetadataStore) entry.getValue();
+      closeDB(entry.getKey().toString(), db);
+    } finally {
+      lock.unlock();
+    }
+    return true;
+  }
+
+  /**
+   * Returns a DB handle if available, creating the handle otherwise.
+   *
+   * @param containerName - Name of the container.
+   * @param containerDBPath - Path to the container DB on disk.
+   * @return MetadataStore.
+   */
+  public MetadataStore getDB(String containerName, String containerDBPath)
+      throws IOException {
+    Preconditions.checkNotNull(containerName);
+    Preconditions.checkState(!containerName.isEmpty());
+    lock.lock();
+    try {
+      MetadataStore db = (MetadataStore) this.get(containerName);
+
+      if (db == null) {
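+        // Cache miss: open the on-disk store for this container.
+        // createIfMissing is false because the container DB is expected to
+        // have been created when the container itself was created.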
+        db = MetadataStoreBuilder.newBuilder()
+            .setDbFile(new File(containerDBPath))
+            .setCreateIfMissing(false)
+            .build();
+        this.put(containerName, db);
+      }
+      return db;
+    } catch (Exception e) {
+      LOG.error("Error opening DB. Container:{} ContainerPath:{}",
+          containerName, containerDBPath, e);
+      throw e;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Removes a DB handle from the cache.
+   *
+   * @param containerName - Name of the container.
+   */
+  public void removeDB(String containerName) {
+    Preconditions.checkNotNull(containerName);
+    Preconditions.checkState(!containerName.isEmpty());
+    lock.lock();
+    try {
+      MetadataStore db = (MetadataStore)this.get(containerName);
+      closeDB(containerName, db);
+      this.remove(containerName);
+    } finally {
+      lock.unlock();
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java
new file mode 100644
index 0000000..08264f0
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.utils;
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java
new file mode 100644
index 0000000..33a5971
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java
@@ -0,0 +1,277 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.ozoneimpl;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl;
+import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
+import org.apache.hadoop.ozone.container.common.impl.Dispatcher;
+import org.apache.hadoop.ozone.container.common.impl.KeyManagerImpl;
+import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.interfaces.KeyManager;
+import org.apache.hadoop.ozone.container.common.statemachine.background
+    .BlockDeletingService;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer;
+import org.apache.hadoop.ozone.container.common.transport.server
+    .XceiverServerSpi;
+import org.apache.hadoop.ozone.container.common.transport.server.ratis
+    .XceiverServerRatis;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT;
+
+/**
+ * Ozone main class sets up the network server and initializes the container
+ * layer.
+ */
+public class OzoneContainer {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OzoneContainer.class);
+
+  private final Configuration ozoneConfig;
+  private final ContainerDispatcher dispatcher;
+  private final ContainerManager manager;
+  private final XceiverServerSpi[] server;
+  private final ChunkManager chunkManager;
+  private final KeyManager keyManager;
+  private final BlockDeletingService blockDeletingService;
+
+  /**
+   * Creates a network endpoint and enables the Ozone container.
+   *
+   * @param datanodeDetails - Details of this datanode
+   * @param ozoneConfig - Config
+   * @throws IOException
+   */
+  public OzoneContainer(
+      DatanodeDetails datanodeDetails, Configuration ozoneConfig)
+      throws IOException {
+    this.ozoneConfig = ozoneConfig;
+    List<StorageLocation> locations = new LinkedList<>();
+    String[] paths = ozoneConfig.getStrings(
+        OzoneConfigKeys.OZONE_METADATA_DIRS);
+    if (paths != null && paths.length > 0) {
+      for (String p : paths) {
+        locations.add(StorageLocation.parse(
+            Paths.get(p).resolve(CONTAINER_ROOT_PREFIX).toString()));
+      }
+    } else {
+      getDataDir(locations);
+    }
+
+    manager = new ContainerManagerImpl();
+    manager.init(this.ozoneConfig, locations, datanodeDetails);
+    this.chunkManager = new ChunkManagerImpl(manager);
+    manager.setChunkManager(this.chunkManager);
+
+    this.keyManager = new KeyManagerImpl(manager, ozoneConfig);
+    manager.setKeyManager(this.keyManager);
+
+    long svcInterval =
+        ozoneConfig.getTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+        OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS);
+    long serviceTimeout = ozoneConfig.getTimeDuration(
+        OZONE_BLOCK_DELETING_SERVICE_TIMEOUT,
+        OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS);
+    this.blockDeletingService = new BlockDeletingService(manager,
+        svcInterval, serviceTimeout, ozoneConfig);
+
+    this.dispatcher = new Dispatcher(manager, this.ozoneConfig);
+
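+    // Two transport endpoints front the same dispatcher: a standalone netty
+    // server (STAND_ALONE replication) and a Ratis server (RATIS
+    // replication). Clients choose the endpoint by replication type (see
+    // getPortbyType).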
+    server = new XceiverServerSpi[]{
+        new XceiverServer(datanodeDetails, this.ozoneConfig, this.dispatcher),
+        XceiverServerRatis.newXceiverServerRatis(
+            datanodeDetails, this.ozoneConfig, dispatcher)
+    };
+  }
+
+  /**
+   * Starts serving requests to ozone container.
+   *
+   * @throws IOException
+   */
+  public void start() throws IOException {
+    for (XceiverServerSpi serverinstance : server) {
+      serverinstance.start();
+    }
+    blockDeletingService.start();
+    dispatcher.init();
+  }
+
+  /**
+   * Stops the ozone container.
+   * <p>
+   * Shutdown logic is not very obvious from the following code. If you need
+   * to modify the logic, please keep these comments in mind. Here is the
+   * shutdown sequence.
+   * <p>
+   * 1. We shutdown the network ports.
+   * <p>
+   * 2. Now we need to wait for all requests in-flight to finish.
+   * <p>
+   * 3. The container manager lock is a read-write lock with "Fairness"
+   * enabled.
+   * <p>
+   * 4. This means that the waiting threads are served in a
+   * "first-come-first-served" manner. Please note that this applies to
+   * waiting threads only.
+   * <p>
+   * 5. Since write locks are exclusive, if we are waiting to get a lock it
+   * implies that we are waiting for in-flight operations to complete.
+   * <p>
+   * 6. If there are other write operations waiting on the reader-writer
+   * lock, fairness guarantees that they will proceed before the shutdown
+   * lock request.
+   * <p>
+   * 7. Since all operations either take a reader or writer lock of container
+   * manager, we are guaranteed that we are the last operation since we have
+   * closed the network port, and we wait until close is successful.
+   * <p>
+   * 8. We take the writer lock and call shutdown on each of the managers in
+   * reverse order; that is, chunkManager, keyManager, and containerManager
+   * are shut down.
+   */
+  public void stop() {
+    LOG.info("Attempting to stop container services.");
+    for(XceiverServerSpi serverinstance: server) {
+      serverinstance.stop();
+    }
+    dispatcher.shutdown();
+
+    try {
+      this.manager.writeLock();
+      this.chunkManager.shutdown();
+      this.keyManager.shutdown();
+      this.manager.shutdown();
+      this.blockDeletingService.shutdown();
+      LOG.info("container services shutdown complete.");
+    } catch (IOException ex) {
+      LOG.warn("container service shutdown error:", ex);
+    } finally {
+      this.manager.writeUnlock();
+    }
+  }
+
+  /**
+   * Populates the given list with the configured data dir paths.
+   *
+   * @param pathList - List of paths to populate.
+   * @throws IOException
+   */
+  private void getDataDir(List<StorageLocation> pathList) throws IOException {
+    for (String dir : ozoneConfig.getStrings(DFS_DATANODE_DATA_DIR_KEY)) {
+      StorageLocation location = StorageLocation.parse(dir);
+      pathList.add(location);
+    }
+  }
+
+  /**
+   * Returns node report of container storage usage.
+   */
+  public SCMNodeReport getNodeReport() throws IOException {
+    return this.manager.getNodeReport();
+  }
+
+  private int getPortbyType(HddsProtos.ReplicationType replicationType) {
+    for (XceiverServerSpi serverinstance : server) {
+      if (serverinstance.getServerType() == replicationType) {
+        return serverinstance.getIPCPort();
+      }
+    }
+    return INVALID_PORT;
+  }
+
+  /**
+   * Returns the container server IPC port.
+   *
+   * @return Container server IPC port.
+   */
+  public int getContainerServerPort() {
+    return getPortbyType(HddsProtos.ReplicationType.STAND_ALONE);
+  }
+
+  /**
+   * Returns the Ratis container Server IPC port.
+   *
+   * @return Ratis port.
+   */
+  public int getRatisContainerServerPort() {
+    return getPortbyType(HddsProtos.ReplicationType.RATIS);
+  }
+
+  /**
+   * Returns container report.
+   * @return - container report.
+   * @throws IOException
+   */
+  public ContainerReportsRequestProto getContainerReport() throws IOException {
+    return this.manager.getContainerReport();
+  }
+
+  // TODO: remove getContainerReports
+  /**
+   * Returns the list of closed containers.
+   * @return - List of closed containers.
+   * @throws IOException
+   */
+  public List<ContainerData> getContainerReports() throws IOException {
+    return this.manager.getContainerReports();
+  }
+
+  @VisibleForTesting
+  public ContainerManager getContainerManager() {
+    return this.manager;
+  }
+
+  /**
+   * Get the container report state to send via HB to SCM.
+   * @return the container report state.
+   */
+  public ReportState getContainerReportState() {
+    return this.manager.getContainerReportState();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java
new file mode 100644
index 0000000..c99c038
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.ozoneimpl;
+/**
+ * Ozone main that calls into the container layer.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java
new file mode 100644
index 0000000..1a51012
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+/**
+ * Generic ozone specific classes.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
new file mode 100644
index 0000000..43e7412
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.protocol;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .ContainerBlocksDeletionACKResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+
+import java.io.IOException;
+
+/**
+ * The protocol spoken between datanodes and SCM. For specifics, please see
+ * the .proto file that defines this protocol.
+ */
+@InterfaceAudience.Private
+public interface StorageContainerDatanodeProtocol {
+  /**
+   * Returns SCM version.
+   * @return Version info.
+   */
+  SCMVersionResponseProto getVersion(SCMVersionRequestProto versionRequest)
+      throws IOException;
+
+  /**
+   * Used by the datanode to send a heartbeat.
+   * @param datanodeDetails - Datanode Details.
+   * @param nodeReport - node report state
+   * @param reportState - container report state.
+   * @return - SCMHeartbeatResponseProto
+   * @throws IOException
+   */
+  SCMHeartbeatResponseProto sendHeartbeat(DatanodeDetailsProto datanodeDetails,
+      SCMNodeReport nodeReport, ReportState reportState) throws IOException;
+
+  /**
+   * Register Datanode.
+   * @param datanodeDetails - Datanode Details.
+   * @param scmAddresses - List of SCMs this datanode is configured to
+   *                     communicate with.
+   * @return SCM Command.
+   */
+  SCMRegisteredCmdResponseProto register(DatanodeDetailsProto datanodeDetails,
+      String[] scmAddresses) throws IOException;
+
+  /**
+   * Send a container report.
+   * @param reports -- Container report.
+   * @return container reports response.
+   * @throws IOException
+   */
+  ContainerReportsResponseProto sendContainerReport(
+      ContainerReportsRequestProto reports) throws IOException;
+
+  /**
+   * Used by datanode to send block deletion ACK to SCM.
+   * @param request block deletion transactions.
+   * @return block deletion transaction response.
+   * @throws IOException
+   */
+  ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK(
+      ContainerBlocksDeletionACKProto request) throws IOException;
+}
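
For orientation, a minimal, hypothetical sketch (not part of this patch) of how a datanode-side caller might drive this interface; the DatanodeDetailsProto and the SCM address are assumptions supplied from elsewhere:

    // Hypothetical sketch; datanodeDetails and the SCM address are assumptions.
    void checkInWithScm(StorageContainerDatanodeProtocol scm,
        DatanodeDetailsProto datanodeDetails) throws IOException {
      // Negotiate the software version first.
      SCMVersionResponseProto version =
          scm.getVersion(SCMVersionRequestProto.newBuilder().build());
      // Register if no persisted datanode ID exists yet.
      scm.register(datanodeDetails, new String[] {"<scm-address>"});
      // Heartbeat with an empty node report and a no-op container report state.
      ReportState reportState = ReportState.newBuilder()
          .setState(ReportState.states.noContainerReports)
          .setCount(0)
          .build();
      SCMHeartbeatResponseProto response = scm.sendHeartbeat(
          datanodeDetails, SCMNodeReport.newBuilder().build(), reportState);
      // version and response would be inspected by the real datanode state machine.
    }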
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java
new file mode 100644
index 0000000..1fc7c57
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+
+import java.util.List;
+
+/**
+ * The protocol spoken between datanodes and SCM.
+ *
+ * Please note that the full protocol spoken between a datanode and SCM is
+ * separated into 2 interfaces. One interface that deals with node state and
+ * another interface that deals with containers.
+ *
+ * This interface has functions that deal with the state of the datanode.
+ */
+@InterfaceAudience.Private
+public interface StorageContainerNodeProtocol {
+  /**
+   * Gets the version info from SCM.
+   * @param versionRequest - version Request.
+   * @return - returns SCM version info and other required information needed
+   * by datanode.
+   */
+  VersionResponse getVersion(SCMVersionRequestProto versionRequest);
+
+  /**
+   * Register the node if the node finds that it is not registered with any SCM.
+   * @param datanodeDetails DatanodeDetails
+   * @return  SCMHeartbeatResponseProto
+   */
+  SCMCommand register(DatanodeDetailsProto datanodeDetails);
+
+  /**
+   * Send heartbeat to indicate the datanode is alive and doing well.
+   * @param datanodeDetails - Datanode Details.
+   * @param nodeReport - node report.
+   * @param reportState - container report state.
+   * @return list of SCMCommands returned in the heartbeat response
+   */
+  List<SCMCommand> sendHeartbeat(DatanodeDetailsProto datanodeDetails,
+      SCMNodeReport nodeReport, ReportState reportState);
+
+}
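
A small hypothetical sketch (not part of this patch) of the node half of the protocol in use on the SCM side; the nodeManager instance is an assumption:

    // Hypothetical SCM-side sketch; container reports travel over
    // StorageContainerDatanodeProtocol, not this interface.
    List<SCMCommand> onHeartbeat(StorageContainerNodeProtocol nodeManager,
        DatanodeDetailsProto datanodeDetails, SCMNodeReport nodeReport,
        ReportState reportState) {
      // Any commands queued for this datanode come back on the heartbeat.
      return nodeManager.sendHeartbeat(datanodeDetails, nodeReport, reportState);
    }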
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java
new file mode 100644
index 0000000..83acf5b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java
@@ -0,0 +1,150 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocol;
+
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Version response class.
+ */
+public class VersionResponse {
+  private final int version;
+  private final Map<String, String> values;
+
+  /**
+   * Creates a version response object.
+   * @param version - version number.
+   * @param values - additional key-value pairs.
+   */
+  public VersionResponse(int version, Map<String, String> values) {
+    this.version = version;
+    this.values = values;
+  }
+
+  /**
+   * Creates a version response object with an empty value map.
+   * @param version - version number.
+   */
+  public VersionResponse(int version) {
+    this.version = version;
+    this.values = new HashMap<>();
+  }
+
+  /**
+   * Returns a new Builder.
+   * @return - Builder.
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Constructs a VersionResponse from a protobuf message.
+   * @param response - SCMVersionResponseProto
+   * @return VersionResponse
+   */
+  public static VersionResponse getFromProtobuf(SCMVersionResponseProto
+                                                    response) {
+    return new VersionResponse(response.getSoftwareVersion(),
+        response.getKeysList().stream()
+            .collect(Collectors.toMap(KeyValue::getKey,
+                KeyValue::getValue)));
+  }
+
+  /**
+   * Adds a value to the version response.
+   * @param key - String
+   * @param value - String
+   */
+  public void put(String key, String value) {
+    if (this.values.containsKey(key)) {
+      throw new IllegalArgumentException("Duplicate key in version response");
+    }
+    values.put(key, value);
+  }
+
+  /**
+   * Return a protobuf message.
+   * @return SCMVersionResponseProto.
+   */
+  public SCMVersionResponseProto getProtobufMessage() {
+
+    List<KeyValue> list = new LinkedList<>();
+    for (Map.Entry<String, String> entry : values.entrySet()) {
+      list.add(KeyValue.newBuilder().setKey(entry.getKey()).
+          setValue(entry.getValue()).build());
+    }
+    return
+        SCMVersionResponseProto.newBuilder()
+            .setSoftwareVersion(this.version)
+            .addAllKeys(list).build();
+  }
+
+  /**
+   * Builder class.
+   */
+  public static class Builder {
+    private int version;
+    private Map<String, String> values;
+
+    Builder() {
+      values = new HashMap<>();
+    }
+
+    /**
+     * Sets the version.
+     * @param ver - version
+     * @return Builder
+     */
+    public Builder setVersion(int ver) {
+      this.version = ver;
+      return this;
+    }
+
+    /**
+     * Adds a value to the version response.
+     * @param key - String
+     * @param value - String
+     */
+    public Builder addValue(String key, String value) {
+      if (this.values.containsKey(key)) {
+        throw new IllegalArgumentException("Duplicate key in version response");
+      }
+      values.put(key, value);
+      return this;
+    }
+
+    /**
+     * Builds the version response.
+     * @return VersionResponse.
+     */
+    public VersionResponse build() {
+      return new VersionResponse(this.version, this.values);
+    }
+  }
+}
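
A minimal sketch (not part of this patch) of the builder and protobuf round trip this class supports; the key/value pair is illustrative only:

    VersionResponse versionResponse = VersionResponse.newBuilder()
        .setVersion(1)
        .addValue("example.key", "example.value")   // illustrative key/value
        .build();
    SCMVersionResponseProto wire = versionResponse.getProtobufMessage();
    VersionResponse decoded = VersionResponse.getFromProtobuf(wire);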
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
new file mode 100644
index 0000000..b1cdbc4
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType.closeContainerCommand;
+
+/**
+ * Asks datanode to close a container.
+ */
+public class CloseContainerCommand
+    extends SCMCommand<SCMCloseContainerCmdResponseProto> {
+
+  private String containerName;
+
+  public CloseContainerCommand(String containerName) {
+    this.containerName = containerName;
+  }
+
+  /**
+   * Returns the type of this command.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getType() {
+    return closeContainerCommand;
+  }
+
+  /**
+   * Gets the protobuf message of this object.
+   *
+   * @return A protobuf message.
+   */
+  @Override
+  public byte[] getProtoBufMessage() {
+    return getProto().toByteArray();
+  }
+
+  public SCMCloseContainerCmdResponseProto getProto() {
+    return SCMCloseContainerCmdResponseProto.newBuilder()
+        .setContainerName(containerName).build();
+  }
+
+  public static CloseContainerCommand getFromProtobuf(
+      SCMCloseContainerCmdResponseProto closeContainerProto) {
+    Preconditions.checkNotNull(closeContainerProto);
+    return new CloseContainerCommand(closeContainerProto.getContainerName());
+
+  }
+
+  public String getContainerName() {
+    return containerName;
+  }
+}
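
A hypothetical round-trip sketch of the encode/decode pattern this command (and the other SCMCommand subclasses) follows; the container name is illustrative:

    CloseContainerCommand command = new CloseContainerCommand("container-001");
    SCMCloseContainerCmdResponseProto proto = command.getProto();
    CloseContainerCommand decoded = CloseContainerCommand.getFromProtobuf(proto);
    assert decoded.getContainerName().equals(command.getContainerName());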
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
new file mode 100644
index 0000000..a11ca25a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMDeleteBlocksCmdResponseProto;
+
+import java.util.List;
+
+/**
+ * An SCM command that asks a datanode to delete a number of blocks.
+ */
+public class DeleteBlocksCommand extends
+    SCMCommand<SCMDeleteBlocksCmdResponseProto> {
+
+  private List<DeletedBlocksTransaction> blocksTobeDeleted;
+
+
+  public DeleteBlocksCommand(List<DeletedBlocksTransaction> blocks) {
+    this.blocksTobeDeleted = blocks;
+  }
+
+  public List<DeletedBlocksTransaction> blocksTobeDeleted() {
+    return this.blocksTobeDeleted;
+  }
+
+  @Override
+  public SCMCmdType getType() {
+    return SCMCmdType.deleteBlocksCommand;
+  }
+
+  @Override
+  public byte[] getProtoBufMessage() {
+    return getProto().toByteArray();
+  }
+
+  public static DeleteBlocksCommand getFromProtobuf(
+      SCMDeleteBlocksCmdResponseProto deleteBlocksProto) {
+    return new DeleteBlocksCommand(deleteBlocksProto
+        .getDeletedBlocksTransactionsList());
+  }
+
+  public SCMDeleteBlocksCmdResponseProto getProto() {
+    return SCMDeleteBlocksCmdResponseProto.newBuilder()
+        .addAllDeletedBlocksTransactions(blocksTobeDeleted).build();
+  }
+}
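
A hypothetical sketch of wrapping a pending deletion transaction in this command; the DeletedBlocksTransaction fields come from the .proto added later in this patch, and the names used are placeholders:

    DeletedBlocksTransaction tx = DeletedBlocksTransaction.newBuilder()
        .setTxID(1L)
        .setContainerName("container-001")   // placeholder container
        .addBlockID("block-001")             // placeholder block
        .setCount(0)
        .build();
    DeleteBlocksCommand command =
        new DeleteBlocksCommand(java.util.Collections.singletonList(tx));
    byte[] wireBytes = command.getProtoBufMessage();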
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java
new file mode 100644
index 0000000..69f2c18
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto
+    .ErrorCode;
+
+/**
+ * Response to Datanode Register call.
+ */
+public class RegisteredCommand extends
+    SCMCommand<SCMRegisteredCmdResponseProto> {
+  private String datanodeUUID;
+  private String clusterID;
+  private ErrorCode error;
+  private String hostname;
+  private String ipAddress;
+
+  public RegisteredCommand(final ErrorCode error, final String datanodeUUID,
+      final String clusterID) {
+    this(error, datanodeUUID, clusterID, null, null);
+  }
+  public RegisteredCommand(final ErrorCode error, final String datanodeUUID,
+      final String clusterID, final String hostname, final String ipAddress) {
+    this.datanodeUUID = datanodeUUID;
+    this.clusterID = clusterID;
+    this.error = error;
+    this.hostname = hostname;
+    this.ipAddress = ipAddress;
+  }
+
+  /**
+   * Returns a new builder.
+   *
+   * @return - Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Returns the type of this command.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getType() {
+    return SCMCmdType.registeredCommand;
+  }
+
+  /**
+   * Returns datanode UUID.
+   *
+   * @return - Datanode ID.
+   */
+  public String getDatanodeUUID() {
+    return datanodeUUID;
+  }
+
+  /**
+   * Returns cluster ID.
+   *
+   * @return -- ClusterID
+   */
+  public String getClusterID() {
+    return clusterID;
+  }
+
+  /**
+   * Returns ErrorCode.
+   *
+   * @return - ErrorCode
+   */
+  public ErrorCode getError() {
+    return error;
+  }
+
+  /**
+   * Returns the hostname.
+   *
+   * @return - hostname
+   */
+  public String getHostName() {
+    return hostname;
+  }
+
+  /**
+   * Returns the ipAddress of the datanode.
+   */
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  /**
+   * Gets the protobuf message of this object.
+   *
+   * @return A protobuf message.
+   */
+  @Override
+  public byte[] getProtoBufMessage() {
+    SCMRegisteredCmdResponseProto.Builder builder =
+        SCMRegisteredCmdResponseProto.newBuilder()
+            .setClusterID(this.clusterID)
+            .setDatanodeUUID(this.datanodeUUID)
+            .setErrorCode(this.error);
+    if (hostname != null && ipAddress != null) {
+      builder.setHostname(hostname).setIpAddress(ipAddress);
+    }
+    return builder.build().toByteArray();
+  }
+
+  /**
+   * A builder class to verify all values are sane.
+   */
+  public static class Builder {
+    private String datanodeUUID;
+    private String clusterID;
+    private ErrorCode error;
+    private String ipAddress;
+    private String hostname;
+
+    /**
+     * Sets the datanode UUID.
+     *
+     * @param dnUUID - datanode UUID
+     * @return Builder
+     */
+    public Builder setDatanodeUUID(String dnUUID) {
+      this.datanodeUUID = dnUUID;
+      return this;
+    }
+
+    /**
+     * Create this object from a Protobuf message.
+     *
+     * @param response - RegisteredCmdResponseProto
+     * @return RegisteredCommand
+     */
+    public  RegisteredCommand getFromProtobuf(SCMRegisteredCmdResponseProto
+                                                        response) {
+      Preconditions.checkNotNull(response);
+      if (response.hasHostname() && response.hasIpAddress()) {
+        return new RegisteredCommand(response.getErrorCode(),
+            response.getDatanodeUUID(), response.getClusterID(),
+            response.getHostname(), response.getIpAddress());
+      } else {
+        return new RegisteredCommand(response.getErrorCode(),
+            response.getDatanodeUUID(), response.getClusterID());
+      }
+    }
+
+    /**
+     * Sets cluster ID.
+     *
+     * @param cluster - clusterID
+     * @return Builder
+     */
+    public Builder setClusterID(String cluster) {
+      this.clusterID = cluster;
+      return this;
+    }
+
+    /**
+     * Sets Error code.
+     *
+     * @param errorCode - error code
+     * @return Builder
+     */
+    public Builder setErrorCode(ErrorCode errorCode) {
+      this.error = errorCode;
+      return this;
+    }
+
+    /**
+     * Sets the hostname.
+     */
+    public Builder setHostname(String host) {
+      this.hostname = host;
+      return this;
+    }
+
+    public Builder setIpAddress(String ipAddr) {
+      this.ipAddress = ipAddr;
+      return this;
+    }
+
+    /**
+     * Build the command object.
+     *
+     * @return RegisteredCommand
+     */
+    public RegisteredCommand build() {
+      if (this.error == ErrorCode.success && (this.datanodeUUID == null
+          || this.datanodeUUID.isEmpty() || this.clusterID == null
+          || this.clusterID.isEmpty())) {
+        throw new IllegalArgumentException("On success, RegisteredCommand "
+            + "needs datanodeUUID and ClusterID.");
+      }
+      if (hostname != null && ipAddress != null) {
+        return new RegisteredCommand(this.error, this.datanodeUUID,
+            this.clusterID, this.hostname, this.ipAddress);
+      } else {
+        return new RegisteredCommand(this.error, this.datanodeUUID,
+            this.clusterID);
+      }
+    }
+  }
+}
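
A hypothetical builder sketch; on success the build() check above requires both a datanode UUID and a cluster ID, so both placeholders below are mandatory:

    RegisteredCommand registered = RegisteredCommand.newBuilder()
        .setErrorCode(ErrorCode.success)
        .setDatanodeUUID("<datanode-uuid>")   // placeholder
        .setClusterID("<cluster-id>")         // placeholder
        .build();
    byte[] wireBytes = registered.getProtoBufMessage();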
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
new file mode 100644
index 0000000..c167d59
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType.reregisterCommand;
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto;
+
+/**
+ * Informs a datanode to register itself with SCM again.
+ */
+public class ReregisterCommand extends
+    SCMCommand<SCMReregisterCmdResponseProto>{
+
+  /**
+   * Returns the type of this command.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getType() {
+    return reregisterCommand;
+  }
+
+  /**
+   * Gets the protobuf message of this object.
+   *
+   * @return A protobuf message.
+   */
+  @Override
+  public byte[] getProtoBufMessage() {
+    return getProto().toByteArray();
+  }
+
+  public SCMReregisterCmdResponseProto getProto() {
+    return SCMReregisterCmdResponseProto
+        .newBuilder()
+        .build();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java
new file mode 100644
index 0000000..73e4194
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+
+import com.google.protobuf.GeneratedMessage;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+
+/**
+ * A class that acts as the base class to convert between Java and SCM
+ * commands in protobuf format.
+ * @param <T> the protobuf message type backing this command.
+ */
+public abstract class SCMCommand<T extends GeneratedMessage> {
+  /**
+   * Returns the type of this command.
+   * @return Type
+   */
+  public  abstract SCMCmdType getType();
+
+  /**
+   * Gets the protobuf message of this object.
+   * @return A protobuf message.
+   */
+  public abstract byte[] getProtoBufMessage();
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java
new file mode 100644
index 0000000..8431752
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocol.commands;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SendContainerReportProto;
+
+/**
+ * Allows a Datanode to send in the container report.
+ */
+public class SendContainerCommand extends SCMCommand<SendContainerReportProto> {
+  /**
+   * Returns a SendContainerCommand from a SendContainerReportProto.
+   * @param unused  - unused proto message.
+   * @return SendContainerCommand
+   */
+  public static SendContainerCommand getFromProtobuf(
+      final SendContainerReportProto unused) {
+    return new SendContainerCommand();
+  }
+
+  /**
+   * Returns a new builder.
+   * @return Builder
+   */
+  public static SendContainerCommand.Builder newBuilder() {
+    return new SendContainerCommand.Builder();
+  }
+
+  /**
+   * Returns the type of this command.
+   *
+   * @return Type
+   */
+  @Override
+  public SCMCmdType getType() {
+    return SCMCmdType.sendContainerReport;
+  }
+
+  /**
+   * Gets the protobuf message of this object.
+   *
+   * @return A protobuf message.
+   */
+  @Override
+  public byte[] getProtoBufMessage() {
+    return SendContainerReportProto.newBuilder().build().toByteArray();
+  }
+
+  /**
+   * A Builder class; this is the standard pattern we use for all commands.
+   */
+  public static class Builder {
+    /**
+     * Return a null command.
+     * @return - NullCommand.
+     */
+    public SendContainerCommand build() {
+      return new SendContainerCommand();
+    }
+  }
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java
new file mode 100644
index 0000000..7083c1b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocol.commands;
+/**
+ Set of classes that help with protobuf conversions.
+ **/
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java
new file mode 100644
index 0000000..a718fa7
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocol;
+
+/**
+ * This package contains classes for HDDS protocol definitions.
+ */
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java
new file mode 100644
index 0000000..12fed1c
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .ContainerBlocksDeletionACKResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisterRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtocolTranslator;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * This class is the client-side translator to translate the requests made on
+ * the {@link StorageContainerDatanodeProtocol} interface to the RPC server
+ * implementing {@link StorageContainerDatanodeProtocolPB}.
+ */
+public class StorageContainerDatanodeProtocolClientSideTranslatorPB
+    implements StorageContainerDatanodeProtocol, ProtocolTranslator, Closeable {
+
+  /**
+   * RpcController is not used and hence is set to null.
+   */
+  private static final RpcController NULL_RPC_CONTROLLER = null;
+  private final StorageContainerDatanodeProtocolPB rpcProxy;
+
+  /**
+   * Constructs a client-side interface that calls into SCM datanode protocol.
+   *
+   * @param rpcProxy - Proxy for RPC.
+   */
+  public StorageContainerDatanodeProtocolClientSideTranslatorPB(
+      StorageContainerDatanodeProtocolPB rpcProxy) {
+    this.rpcProxy = rpcProxy;
+  }
+
+  /**
+   * Closes the underlying RPC proxy and releases any system resources
+   * associated with it. If already closed, invoking this method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the close may
+   * fail require careful attention. It is strongly advised to relinquish the
+   * underlying resources and to internally <em>mark</em> the {@code Closeable}
+   * as closed, prior to throwing the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    RPC.stopProxy(rpcProxy);
+  }
+
+  /**
+   * Return the proxy object underlying this protocol translator.
+   *
+   * @return the proxy object underlying this protocol translator.
+   */
+  @Override
+  public Object getUnderlyingProxyObject() {
+    return rpcProxy;
+  }
+
+  /**
+   * Returns SCM version.
+   *
+   * @param unused - set to null and unused.
+   * @return Version info.
+   */
+  @Override
+  public SCMVersionResponseProto getVersion(SCMVersionRequestProto
+      unused) throws IOException {
+    SCMVersionRequestProto request =
+        SCMVersionRequestProto.newBuilder().build();
+    final SCMVersionResponseProto response;
+    try {
+      response = rpcProxy.getVersion(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException ex) {
+      throw ProtobufHelper.getRemoteException(ex);
+    }
+    return response;
+  }
+
+  /**
+   * Heartbeat sent by the datanode to SCM.
+   *
+   * @param datanodeDetailsProto - Datanode Details
+   * @param nodeReport - node report
+   * @throws IOException
+   */
+
+  @Override
+  public SCMHeartbeatResponseProto sendHeartbeat(
+      DatanodeDetailsProto datanodeDetailsProto,
+      SCMNodeReport nodeReport, ReportState reportState) throws IOException {
+    SCMHeartbeatRequestProto.Builder req = SCMHeartbeatRequestProto
+        .newBuilder();
+    req.setDatanodeDetails(datanodeDetailsProto);
+    req.setNodeReport(nodeReport);
+    req.setContainerReportState(reportState);
+    final SCMHeartbeatResponseProto resp;
+    try {
+      resp = rpcProxy.sendHeartbeat(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    return resp;
+  }
+
+  /**
+   * Register Datanode.
+   *
+   * @param datanodeDetailsProto - Datanode Details
+   * @return SCM Command.
+   */
+  @Override
+  public SCMRegisteredCmdResponseProto register(
+      DatanodeDetailsProto datanodeDetailsProto,
+      String[] scmAddresses) throws IOException {
+    SCMRegisterRequestProto.Builder req =
+        SCMRegisterRequestProto.newBuilder();
+    req.setDatanodeDetails(datanodeDetailsProto);
+    final SCMRegisteredCmdResponseProto response;
+    try {
+      response = rpcProxy.register(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    return response;
+  }
+
+  /**
+   * Send a container report.
+   *
+   * @param reports -- Container report
+   * @return ContainerReportsResponseProto
+   * @throws IOException
+   */
+  @Override
+  public ContainerReportsResponseProto sendContainerReport(
+      ContainerReportsRequestProto reports) throws IOException {
+    final ContainerReportsResponseProto resp;
+    try {
+      resp = rpcProxy.sendContainerReport(NULL_RPC_CONTROLLER, reports);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    return resp;
+  }
+
+  @Override
+  public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK(
+      ContainerBlocksDeletionACKProto deletedBlocks) throws IOException {
+    final ContainerBlocksDeletionACKResponseProto resp;
+    try {
+      resp = rpcProxy.sendContainerBlocksDeletionACK(NULL_RPC_CONTROLLER,
+          deletedBlocks);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    return resp;
+  }
+}
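
A hypothetical client wiring sketch; obtaining the StorageContainerDatanodeProtocolPB proxy from Hadoop's RPC machinery is assumed and elided here:

    SCMVersionResponseProto fetchScmVersion(
        StorageContainerDatanodeProtocolPB rpcProxy) throws IOException {
      try (StorageContainerDatanodeProtocolClientSideTranslatorPB client =
          new StorageContainerDatanodeProtocolClientSideTranslatorPB(rpcProxy)) {
        // The request argument is unused by design, so null is acceptable.
        return client.getVersion(null);
      }
    }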
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
new file mode 100644
index 0000000..9b28b5a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .StorageContainerDatanodeProtocolService;
+import org.apache.hadoop.ipc.ProtocolInfo;
+
+/**
+ * Protocol used from a datanode to StorageContainerManager.  This extends
+ * the Protocol Buffers service interface to add Hadoop-specific annotations.
+ */
+
+@ProtocolInfo(protocolName =
+    "org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol",
+    protocolVersion = 1)
+public interface StorageContainerDatanodeProtocolPB extends
+    StorageContainerDatanodeProtocolService.BlockingInterface {
+}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java
new file mode 100644
index 0000000..985b75a
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .ContainerBlocksDeletionACKResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
+
+import java.io.IOException;
+
+/**
+ * This class is the server-side translator that forwards requests received on
+ * {@link StorageContainerDatanodeProtocolPB} to the {@link
+ * StorageContainerDatanodeProtocol} server implementation.
+ */
+public class StorageContainerDatanodeProtocolServerSideTranslatorPB
+    implements StorageContainerDatanodeProtocolPB {
+
+  private final StorageContainerDatanodeProtocol impl;
+
+  public StorageContainerDatanodeProtocolServerSideTranslatorPB(
+      StorageContainerDatanodeProtocol impl) {
+    this.impl = impl;
+  }
+
+  @Override
+  public StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto
+      getVersion(RpcController controller,
+      StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto request)
+      throws ServiceException {
+    try {
+      return impl.getVersion(request);
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto
+      register(RpcController controller, StorageContainerDatanodeProtocolProtos
+      .SCMRegisterRequestProto request) throws ServiceException {
+    String[] addressArray = null;
+
+    if (request.hasAddressList()) {
+      addressArray = request.getAddressList().getAddressListList()
+          .toArray(new String[0]);
+    }
+
+    try {
+      return impl.register(request.getDatanodeDetails(), addressArray);
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public SCMHeartbeatResponseProto
+      sendHeartbeat(RpcController controller,
+      SCMHeartbeatRequestProto request) throws ServiceException {
+    try {
+      return impl.sendHeartbeat(request.getDatanodeDetails(),
+          request.getNodeReport(),
+          request.getContainerReportState());
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public ContainerReportsResponseProto sendContainerReport(
+      RpcController controller, ContainerReportsRequestProto request)
+      throws ServiceException {
+    try {
+      return impl.sendContainerReport(request);
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK(
+      RpcController controller, ContainerBlocksDeletionACKProto request)
+      throws ServiceException {
+    try {
+      return impl.sendContainerBlocksDeletionACK(request);
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
+}
\ No newline at end of file
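
A hypothetical server wiring sketch; newReflectiveBlockingService is the factory generated by protobuf for services compiled with java_generic_services = true, and binding the resulting BlockingService to a Hadoop RPC server is elided:

    com.google.protobuf.BlockingService buildDatanodeProtocolService(
        StorageContainerDatanodeProtocol scmImpl) {
      StorageContainerDatanodeProtocolServerSideTranslatorPB translator =
          new StorageContainerDatanodeProtocolServerSideTranslatorPB(scmImpl);
      return StorageContainerDatanodeProtocolService
          .newReflectiveBlockingService(translator);
    }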
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
new file mode 100644
index 0000000..378a8f3
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocolPB;
diff --git a/hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto b/hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto
new file mode 100644
index 0000000..03b85e5
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/proto/StorageContainerDatanodeProtocol.proto
@@ -0,0 +1,353 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and unstable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for an *unstable* .proto interface.
+ */
+
+option java_package = "org.apache.hadoop.hdds.protocol.proto";
+
+option java_outer_classname = "StorageContainerDatanodeProtocolProtos";
+
+option java_generic_services = true;
+
+option java_generate_equals_and_hash = true;
+
+package hadoop.hdds;
+
+import "hdds.proto";
+
+
+/**
+* This message is sent by the datanode to indicate that it is alive or is
+* registering with the node manager.
+*/
+message SCMHeartbeatRequestProto {
+  required DatanodeDetailsProto datanodeDetails = 1;
+  optional SCMNodeReport nodeReport = 2;
+  optional ReportState containerReportState = 3;
+}
+
+enum DatanodeContainerState {
+  closed = 0;
+  open = 1;
+}
+
+/**
+ReportState conveys, from the datanode to SCM, whether the datanode has
+container report information that SCM might be interested in.*/
+message ReportState {
+  enum states {
+    noContainerReports = 0;
+    completeContinerReport = 1;
+    deltaContainerReport = 2;
+  }
+  required states state = 1;
+  required int64 count = 2 [default = 0];
+}
+
+
+/**
+This message is used to persist the information about a container in the
+SCM database. This information allows SCM to start up faster and avoids
+having to keep all container info in memory all the time.
+  */
+message ContainerPersistanceProto {
+  required DatanodeContainerState state = 1;
+  required hadoop.hdds.Pipeline pipeline = 2;
+  required ContainerInfo info = 3;
+}
+
+/**
+This message is used to do a quick look up of which containers are effected
+if a node goes down
+*/
+message NodeContianerMapping {
+  repeated string contianerName = 1;
+}
+
+/**
+A container report contains the following information.
+*/
+message ContainerInfo {
+  required string containerName = 1;
+  optional string finalhash = 2;
+  optional int64 size = 3;
+  optional int64 used = 4;
+  optional int64 keyCount = 5;
+  // TODO: move the io count to separate message
+  optional int64 readCount = 6;
+  optional int64 writeCount = 7;
+  optional int64 readBytes = 8;
+  optional int64 writeBytes = 9;
+  required int64 containerID = 10;
+  optional hadoop.hdds.LifeCycleState state = 11;
+}
+
+// The deleted blocks which are stored in deletedBlock.db of scm.
+message DeletedBlocksTransaction {
+  required int64 txID = 1;
+  required string containerName = 2;
+  repeated string blockID = 3;
+  // the retry time of sending deleting command to datanode.
+  required int32 count = 4;
+}
+
+/**
+A set of container reports; the max count is generally set to
+8192 since that keeps the size of the reports under 1 MB.
+*/
+message ContainerReportsRequestProto {
+  enum reportType {
+    fullReport = 0;
+    deltaReport = 1;
+  }
+  required DatanodeDetailsProto datanodeDetails = 1;
+  repeated ContainerInfo reports = 2;
+  required reportType type = 3;
+}
+
+message ContainerReportsResponseProto {
+}
+
+/**
+* This message is sent along with the heartbeat to report datanode
+* storage utilization to SCM.
+*/
+message SCMNodeReport {
+  repeated SCMStorageReport storageReport = 1;
+}
+
+message SCMStorageReport {
+  required string storageUuid = 1;
+  optional uint64 capacity = 2 [default = 0];
+  optional uint64 scmUsed = 3 [default = 0];
+  optional uint64 remaining = 4 [default = 0];
+  //optional hadoop.hdfs.StorageTypeProto storageType = 5 [default = DISK];
+}
+
+message SCMRegisterRequestProto {
+  required DatanodeDetailsProto datanodeDetails = 1;
+  optional SCMNodeAddressList addressList = 2;
+}
+
+/**
+ * Request for version info of the software stack on the server.
+ */
+message SCMVersionRequestProto {
+
+}
+
+/**
+* Generic response that is sent to a version request. This allows keys to be
+* added on the fly and the protocol to remain stable.
+*/
+message SCMVersionResponseProto {
+  required uint32 softwareVersion = 1;
+  repeated hadoop.hdds.KeyValue keys = 2;
+}
+
+message SCMNodeAddressList {
+  repeated string addressList = 1;
+}
+
+/**
+ * Datanode ID returned by the SCM. This is similar to namenode
+ * registration of a datanode.
+ */
+message SCMRegisteredCmdResponseProto {
+  enum ErrorCode {
+    success = 1;
+    errorNodeNotPermitted = 2;
+  }
+  required ErrorCode errorCode = 2;
+  required string datanodeUUID = 3;
+  required string clusterID = 4;
+  optional SCMNodeAddressList addressList = 5;
+  optional string hostname = 6;
+  optional string ipAddress = 7;
+}
+
+/**
+ * SCM informs a datanode to register itself again.
+ * Upon receiving this command, the datanode will transition to the REGISTER state.
+ */
+message SCMReregisterCmdResponseProto {}
+
+/**
+This command tells the datanode to send in the container report when possible.
+*/
+message SendContainerReportProto {
+}
+
+/**
+This command asks the datanode to close a specific container.
+*/
+message SCMCloseContainerCmdResponseProto {
+  required string containerName = 1;
+}
+
+/**
+Type of commands supported by SCM to datanode protocol.
+*/
+enum SCMCmdType {
+  versionCommand = 2;
+  registeredCommand = 3;
+  sendContainerReport = 4;
+  reregisterCommand = 5;
+  deleteBlocksCommand = 6;
+  closeContainerCommand = 7;
+}
+
+/*
+ * These are commands returned by SCM for the datanode to execute.
+ */
+message SCMCommandResponseProto {
+  required SCMCmdType cmdType = 2; // Type of the command
+  optional SCMRegisteredCmdResponseProto registeredProto = 3;
+  optional SCMVersionResponseProto versionProto = 4;
+  optional SendContainerReportProto sendReport = 5;
+  optional SCMReregisterCmdResponseProto reregisterProto = 6;
+  optional SCMDeleteBlocksCmdResponseProto deleteBlocksProto = 7;
+  required string datanodeUUID = 8;
+  optional SCMCloseContainerCmdResponseProto closeContainerProto = 9;
+}
+
+
+/*
+ * A group of commands for the datanode to execute
+ */
+message SCMHeartbeatResponseProto {
+  repeated SCMCommandResponseProto commands = 1;
+}
+
+// HB response from SCM, contains a list of block deletion transactions.
+message SCMDeleteBlocksCmdResponseProto {
+  repeated DeletedBlocksTransaction deletedBlocksTransactions = 1;
+}
+
+// SendACK response returned by datanode to SCM, currently empty.
+message ContainerBlocksDeletionACKResponseProto {
+}
+
+// ACK message datanode sent to SCM, contains the result of
+// block deletion transactions.
+message ContainerBlocksDeletionACKProto {
+  message DeleteBlockTransactionResult {
+    required int64 txID = 1;
+    required bool success = 2;
+  }
+  repeated DeleteBlockTransactionResult results = 1;
+}
+
+/**
+ * Protocol used from a datanode to StorageContainerManager.
+ *
+ * Please see the request and response messages for details of the RPC calls.
+ *
+ * Here is a simple state diagram that shows how a datanode would boot up and
+ * communicate with SCM.
+ *
+ *           -----------------------
+ *          |         Start         |
+ *           ---------- ------------
+ *                     |
+ *                     |
+ *                     |
+ *                     |
+ *                     |
+ *                     |
+ *                     |
+ *           ----------v-------------
+ *          |   Searching for  SCM    ------------
+ *           ---------- -------------             |
+ *                     |                          |
+ *                     |                          |
+ *                     |                ----------v-------------
+ *                     |               | Register if needed     |
+ *                     |                ----------- ------------
+ *                     |                           |
+ *                     v                           |
+ *            ----------- ----------------         |
+ *  ---------   Heartbeat state           <--------
+ * |          --------^-------------------
+ * |                  |
+ * |                  |
+ * |                  |
+ * |                  |
+ * |                  |
+ * |                  |
+ * |                  |
+ *  ------------------
+ *
+ *
+ *
+ * Here is how this protocol is used by the datanode. When a datanode boots up
+ * it moves into a state called SEARCHING_SCM. In this state the datanode is
+ * trying to establish communication with the SCM. The addresses of the SCMs
+ * are retrieved from the configuration information.
+ *
+ * In the SEARCHING_SCM state, the only RPC call made by the datanode is a
+ * getVersion call to SCM. Once any of the SCMs reply, the datanode checks
+ * whether it has a locally persisted datanode ID. If it has, this datanode is
+ * already registered with some SCM. If this file is not found, the datanode
+ * assumes that it needs to register.
+ *
+ * If registration is needed, the datanode moves into the REGISTER state. It
+ * sends a register call with the DatanodeDetailsProto data structure and
+ * persists that info.
+ *
+ * The response to the register command contains the clusterID. The datanode
+ * persists this information as well and moves into the heartbeat state.
+ *
+ * Once in the heartbeat state, the datanode sends heartbeats and container
+ * reports to SCM and processes commands issued by SCM until it is shut down.
+ *
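+ * As a rough sketch (illustrative only, using the RPCs defined in this
+ * service), the datanode side boils down to:
+ *
+ *   version = getVersion(SCMVersionRequestProto)        // SEARCHING_SCM
+ *   if (no locally persisted datanode ID) {
+ *     response = register(SCMRegisterRequestProto)      // REGISTER
+ *     persist(response.clusterID and datanode details)
+ *   }
+ *   while (not shut down) {                             // HEARTBEAT
+ *     response = sendHeartbeat(SCMHeartbeatRequestProto)
+ *     execute(response.commands)
+ *   }
+ *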
+ */
+service StorageContainerDatanodeProtocolService {
+
+  /**
+  * Gets the version information from the SCM.
+  */
+  rpc getVersion (SCMVersionRequestProto) returns (SCMVersionResponseProto);
+
+  /**
+  * Registers a data node with SCM.
+  */
+  rpc register (SCMRegisterRequestProto) returns (SCMRegisteredCmdResponseProto);
+
+  /**
+   * Sends a heartbeat from the datanode to SCM. Heartbeats under SCM look
+   * more like a lifeline protocol than heartbeats under HDFS. In other words,
+   * they are extremely lightweight and contain no data payload.
+   */
+  rpc sendHeartbeat (SCMHeartbeatRequestProto) returns (SCMHeartbeatResponseProto);
+
+  /**
+    sendContainerReport sends the container report to SCM. This will
+    return a null command as the response.
+  */
+  rpc sendContainerReport(ContainerReportsRequestProto) returns (ContainerReportsResponseProto);
+
+  /**
+   * Sends the block deletion ACK to SCM.
+   */
+  rpc sendContainerBlocksDeletionACK (ContainerBlocksDeletionACKProto) returns (ContainerBlocksDeletionACKResponseProto);
+}
diff --git a/hadoop-hdds/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider b/hadoop-hdds/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider
new file mode 100644
index 0000000..2e103fe
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.ozone.web.netty.ObjectStoreJerseyContainerProvider
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java
new file mode 100644
index 0000000..923440e
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ContainerTestUtils.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerDatanodeProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.net.InetSocketAddress;
+
+/**
+ * Helper utility to test containers.
+ */
+public final class ContainerTestUtils {
+
+  private ContainerTestUtils() {
+  }
+
+  /**
+   * Creates an Endpoint class for testing purposes.
+   *
+   * @param conf - Configuration
+   * @param address - InetSocketAddress of the SCM
+   * @param rpcTimeout - RPC timeout in milliseconds
+   * @return EndPoint
+   * @throws Exception
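+   *
+   * A typical test usage (illustrative only, with hypothetical variable names):
+   *   EndpointStateMachine endpoint =
+   *       ContainerTestUtils.createEndpoint(conf, scmAddress, 1000);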
+   */
+  public static EndpointStateMachine createEndpoint(Configuration conf,
+      InetSocketAddress address, int rpcTimeout) throws Exception {
+    RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
+        ProtobufRpcEngine.class);
+    long version =
+        RPC.getProtocolVersion(StorageContainerDatanodeProtocolPB.class);
+
+    StorageContainerDatanodeProtocolPB rpcProxy = RPC.getProtocolProxy(
+        StorageContainerDatanodeProtocolPB.class, version,
+        address, UserGroupInformation.getCurrentUser(), conf,
+        NetUtils.getDefaultSocketFactory(conf), rpcTimeout,
+        RetryPolicies.TRY_ONCE_THEN_FAIL).getProxy();
+
+    StorageContainerDatanodeProtocolClientSideTranslatorPB rpcClient =
+        new StorageContainerDatanodeProtocolClientSideTranslatorPB(rpcProxy);
+    return new EndpointStateMachine(address, rpcClient, conf);
+  }
+}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java
new file mode 100644
index 0000000..b63c5fb
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/SCMTestUtils.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+
+import com.google.protobuf.BlockingService;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .StorageContainerDatanodeProtocolService;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
+import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerDatanodeProtocolServerSideTranslatorPB;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+
+/**
+ * Test utilities for starting an SCM RPC server in unit tests.
+ */
+public final class SCMTestUtils {
+  /**
+   * Never constructed.
+   */
+  private SCMTestUtils() {
+  }
+
+  /**
+   * Starts an RPC server, if configured.
+   *
+   * @param conf configuration
+   * @param addr configured address of RPC server
+   * @param protocol RPC protocol provided by RPC server
+   * @param instance RPC protocol implementation instance
+   * @param handlerCount RPC server handler count
+   * @return RPC server
+   * @throws IOException if there is an I/O error while creating RPC server
+   */
+  private static RPC.Server startRpcServer(Configuration conf,
+      InetSocketAddress addr, Class<?>
+      protocol, BlockingService instance, int handlerCount)
+      throws IOException {
+    RPC.Server rpcServer = new RPC.Builder(conf)
+        .setProtocol(protocol)
+        .setInstance(instance)
+        .setBindAddress(addr.getHostString())
+        .setPort(addr.getPort())
+        .setNumHandlers(handlerCount)
+        .setVerbose(false)
+        .setSecretManager(null)
+        .build();
+
+    DFSUtil.addPBProtocol(conf, protocol, instance, rpcServer);
+    return rpcServer;
+  }
+
+
+  /**
+   * Starts an SCM RPC server for tests.
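+   *
+   * A typical test pairing with {@link ScmTestMock} (illustrative only):
+   *   RPC.Server scm = SCMTestUtils.startScmRpcServer(
+   *       SCMTestUtils.getConf(), new ScmTestMock(),
+   *       SCMTestUtils.getReuseableAddress(), 10);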
+   */
+  public static RPC.Server startScmRpcServer(Configuration configuration,
+      StorageContainerDatanodeProtocol server,
+      InetSocketAddress rpcServerAddresss, int handlerCount) throws
+      IOException {
+    RPC.setProtocolEngine(configuration,
+        StorageContainerDatanodeProtocolPB.class,
+        ProtobufRpcEngine.class);
+
+    BlockingService scmDatanodeService =
+        StorageContainerDatanodeProtocolService.
+            newReflectiveBlockingService(
+                new StorageContainerDatanodeProtocolServerSideTranslatorPB(
+                    server));
+
+    RPC.Server scmServer = startRpcServer(configuration, rpcServerAddresss,
+        StorageContainerDatanodeProtocolPB.class, scmDatanodeService,
+        handlerCount);
+
+    scmServer.start();
+    return scmServer;
+  }
+
+  public static InetSocketAddress getReuseableAddress() throws IOException {
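+    // Bind an ephemeral port (port 0) with SO_REUSEADDR and hand the chosen
+    // port back to the caller so a test server can bind it right away.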
+    try (ServerSocket socket = new ServerSocket(0)) {
+      socket.setReuseAddress(true);
+      int port = socket.getLocalPort();
+      String addr = InetAddress.getLoopbackAddress().getHostAddress();
+      return new InetSocketAddress(addr, port);
+    }
+  }
+
+  public static Configuration getConf() {
+    return new Configuration();
+  }
+
+  public static OzoneConfiguration getOzoneConf() {
+    return new OzoneConfiguration();
+  }
+
+}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
new file mode 100644
index 0000000..41a8a80
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
@@ -0,0 +1,274 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.VersionInfo;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .ContainerBlocksDeletionACKResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * SCM RPC mock class.
+ */
+public class ScmTestMock implements StorageContainerDatanodeProtocol {
+  private int rpcResponseDelay;
+  private AtomicInteger heartbeatCount = new AtomicInteger(0);
+  private AtomicInteger rpcCount = new AtomicInteger(0);
+  private ReportState reportState;
+  private AtomicInteger containerReportsCount = new AtomicInteger(0);
+
+  // Map of datanode to containers
+  private Map<DatanodeDetails, Map<String, ContainerInfo>> nodeContainers =
+      new HashMap<>();
+  /**
+   * Returns the number of heartbeats made to this class.
+   *
+   * @return int
+   */
+  public int getHeartbeatCount() {
+    return heartbeatCount.get();
+  }
+
+  /**
+   * Returns the number of RPC calls made to this mock class instance.
+   *
+   * @return - Number of RPC calls serviced by this class.
+   */
+  public int getRpcCount() {
+    return rpcCount.get();
+  }
+
+  /**
+   * Gets the RPC response delay.
+   *
+   * @return delay in milliseconds.
+   */
+  public int getRpcResponseDelay() {
+    return rpcResponseDelay;
+  }
+
+  /**
+   * Sets the RPC response delay.
+   *
+   * @param rpcResponseDelay - delay in milliseconds.
+   */
+  public void setRpcResponseDelay(int rpcResponseDelay) {
+    this.rpcResponseDelay = rpcResponseDelay;
+  }
+
+  /**
+   * Returns the number of container reports the server has seen.
+   * @return int
+   */
+  public int getContainerReportsCount() {
+    return containerReportsCount.get();
+  }
+
+  /**
+   * Returns the number of containers that have been reported so far.
+   * @return - count of reported containers.
+   */
+  public long getContainerCount() {
+    return nodeContainers.values().parallelStream().mapToLong((containerMap)->{
+      return containerMap.size();
+    }).sum();
+  }
+
+  /**
+   * Get the number of keys reported from container reports.
+   * @return - number of keys reported.
+   */
+  public long getKeyCount() {
+    return nodeContainers.values().parallelStream().mapToLong((containerMap)->{
+      return containerMap.values().parallelStream().mapToLong((container) -> {
+        return container.getKeyCount();
+      }).sum();
+    }).sum();
+  }
+
+  /**
+   * Get the number of bytes used from container reports.
+   * @return - number of bytes used.
+   */
+  public long getBytesUsed() {
+    return nodeContainers.values().parallelStream().mapToLong((containerMap)->{
+      return containerMap.values().parallelStream().mapToLong((container) -> {
+        return container.getUsed();
+      }).sum();
+    }).sum();
+  }
+
+  /**
+   * Returns SCM version.
+   *
+   * @return Version info.
+   */
+  @Override
+  public StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto
+      getVersion(StorageContainerDatanodeProtocolProtos
+      .SCMVersionRequestProto unused) throws IOException {
+    rpcCount.incrementAndGet();
+    sleepIfNeeded();
+    VersionInfo versionInfo = VersionInfo.getLatestVersion();
+    return VersionResponse.newBuilder()
+        .setVersion(versionInfo.getVersion())
+        .addValue(VersionInfo.DESCRIPTION_KEY, versionInfo.getDescription())
+        .build().getProtobufMessage();
+  }
+
+  private void sleepIfNeeded() {
+    if (getRpcResponseDelay() > 0) {
+      try {
+        Thread.sleep(getRpcResponseDelay());
+      } catch (InterruptedException ex) {
+        // Just ignore this exception.
+      }
+    }
+  }
+
+  /**
+   * Used by data node to send a Heartbeat.
+   *
+   * @param datanodeDetailsProto - DatanodeDetailsProto.
+   * @param nodeReport - node report.
+   * @return - SCMHeartbeatResponseProto
+   * @throws IOException
+   */
+  @Override
+  public StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto
+      sendHeartbeat(DatanodeDetailsProto datanodeDetailsProto,
+                    SCMNodeReport nodeReport, ReportState scmReportState)
+      throws IOException {
+    rpcCount.incrementAndGet();
+    heartbeatCount.incrementAndGet();
+    this.reportState = scmReportState;
+    sleepIfNeeded();
+    List<SCMCommandResponseProto>
+        cmdResponses = new LinkedList<>();
+    return SCMHeartbeatResponseProto.newBuilder().addAllCommands(cmdResponses)
+        .build();
+  }
+
+  /**
+   * Register Datanode.
+   *
+   * @param datanodeDetailsProto DatanodeDetailsProto.
+   * @param scmAddresses - List of SCMs this datanode is configured to
+   * communicate with.
+   * @return SCM Command.
+   */
+  @Override
+  public StorageContainerDatanodeProtocolProtos
+      .SCMRegisteredCmdResponseProto register(
+          DatanodeDetailsProto datanodeDetailsProto, String[] scmAddresses)
+      throws IOException {
+    rpcCount.incrementAndGet();
+    sleepIfNeeded();
+    return StorageContainerDatanodeProtocolProtos
+        .SCMRegisteredCmdResponseProto
+        .newBuilder().setClusterID(UUID.randomUUID().toString())
+        .setDatanodeUUID(datanodeDetailsProto.getUuid()).setErrorCode(
+            StorageContainerDatanodeProtocolProtos
+                .SCMRegisteredCmdResponseProto.ErrorCode.success).build();
+  }
+
+  /**
+   * Send a container report.
+   *
+   * @param reports -- Container report
+   * @return HeartbeatResponse.nullcommand.
+   * @throws IOException
+   */
+  @Override
+  public StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto
+      sendContainerReport(StorageContainerDatanodeProtocolProtos
+      .ContainerReportsRequestProto reports) throws IOException {
+    Preconditions.checkNotNull(reports);
+    containerReportsCount.incrementAndGet();
+
+    DatanodeDetails datanode = DatanodeDetails.getFromProtoBuf(
+        reports.getDatanodeDetails());
+    if (reports.getReportsCount() > 0) {
+      Map<String, ContainerInfo> containers = nodeContainers.get(datanode);
+      if (containers == null) {
+        containers = new LinkedHashMap<>();
+        nodeContainers.put(datanode, containers);
+      }
+
+      for (StorageContainerDatanodeProtocolProtos.ContainerInfo report:
+          reports.getReportsList()) {
+        containers.put(report.getContainerName(), report);
+      }
+    }
+
+    return StorageContainerDatanodeProtocolProtos
+        .ContainerReportsResponseProto.newBuilder().build();
+  }
+
+  @Override
+  public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK(
+      ContainerBlocksDeletionACKProto request) throws IOException {
+    return ContainerBlocksDeletionACKResponseProto
+        .newBuilder().getDefaultInstanceForType();
+  }
+
+  public ReportState getReportState() {
+    return this.reportState;
+  }
+
+  /**
+   * Resets the mock SCM so tests get a fresh start without rebuilding the mock.
+   */
+  public void reset() {
+    heartbeatCount.set(0);
+    rpcCount.set(0);
+    reportState = ReportState.newBuilder()
+        .setState(ReportState.states.noContainerReports)
+        .setCount(0).build();
+    containerReportsCount.set(0);
+    nodeContainers.clear();
+
+  }
+}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java
new file mode 100644
index 0000000..9a9aab1
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestDatanodeStateMachine.java
@@ -0,0 +1,379 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+
+import com.google.common.collect.Maps;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .SCMConnectionManager;
+import org.apache.hadoop.ozone.container.common.states.DatanodeState;
+import org.apache.hadoop.ozone.container.common.states.datanode
+    .InitDatanodeState;
+import org.apache.hadoop.ozone.container.common.states.datanode
+    .RunningDatanodeState;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.file.Paths;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_RPC_TIMEOUT;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests the datanode state machine class and its states.
+ */
+public class TestDatanodeStateMachine {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestDatanodeStateMachine.class);
+  private final int scmServerCount = 3;
+  private List<String> serverAddresses;
+  private List<RPC.Server> scmServers;
+  private List<ScmTestMock> mockServers;
+  private ExecutorService executorService;
+  private Configuration conf;
+  private File testRoot;
+
+  @Before
+  public void setUp() throws Exception {
+    conf = SCMTestUtils.getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_RPC_TIMEOUT, 500,
+        TimeUnit.MILLISECONDS);
+    conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT, true);
+    serverAddresses = new LinkedList<>();
+    scmServers = new LinkedList<>();
+    mockServers = new LinkedList<>();
+    for (int x = 0; x < scmServerCount; x++) {
+      int port = SCMTestUtils.getReuseableAddress().getPort();
+      String address = "127.0.0.1";
+      serverAddresses.add(address + ":" + port);
+      ScmTestMock mock = new ScmTestMock();
+
+      scmServers.add(SCMTestUtils.startScmRpcServer(conf, mock,
+          new InetSocketAddress(address, port), 10));
+      mockServers.add(mock);
+    }
+
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES,
+        serverAddresses.toArray(new String[0]));
+
+    String path = GenericTestUtils
+        .getTempPath(TestDatanodeStateMachine.class.getSimpleName());
+    testRoot = new File(path);
+    if (!testRoot.mkdirs()) {
+      LOG.info("Required directories {} already exist.", testRoot);
+    }
+
+    File dataDir = new File(testRoot, "data");
+    conf.set(DFS_DATANODE_DATA_DIR_KEY, dataDir.getAbsolutePath());
+    if (!dataDir.mkdirs()) {
+      LOG.info("Data dir create failed.");
+    }
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        new File(testRoot, "scm").getAbsolutePath());
+    path = Paths.get(path.toString(),
+        TestDatanodeStateMachine.class.getSimpleName() + ".id").toString();
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ID, path);
+    executorService = HadoopExecutors.newCachedThreadPool(
+        new ThreadFactoryBuilder().setDaemon(true)
+            .setNameFormat("Test Data Node State Machine Thread - %d").build());
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    try {
+      if (executorService != null) {
+        executorService.shutdown();
+        try {
+          if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+            executorService.shutdownNow();
+          }
+
+          if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+            LOG.error("Unable to shutdown properly.");
+          }
+        } catch (InterruptedException e) {
+          LOG.error("Error attempting to shutdown.", e);
+          executorService.shutdownNow();
+        }
+      }
+      for (RPC.Server s : scmServers) {
+        s.stop();
+      }
+    } catch (Exception e) {
+      // Ignore all exceptions from the shutdown.
+    } finally {
+      testRoot.delete();
+    }
+  }
+
+  /**
+   * Assert that starting the state machine executes the Init state.
+   *
+   * @throws InterruptedException
+   */
+  @Test
+  public void testStartStopDatanodeStateMachine() throws IOException,
+      InterruptedException, TimeoutException {
+    try (DatanodeStateMachine stateMachine =
+        new DatanodeStateMachine(getNewDatanodeDetails(), conf)) {
+      stateMachine.startDaemon();
+      SCMConnectionManager connectionManager =
+          stateMachine.getConnectionManager();
+      GenericTestUtils.waitFor(() -> connectionManager.getValues().size() == 3,
+          1000, 30000);
+
+      stateMachine.stopDaemon();
+      assertTrue(stateMachine.isDaemonStopped());
+    }
+  }
+
+  /**
+   * This test explores the state machine by invoking each call in sequence,
+   * just as the state machine would call it. Because this is a test, we are
+   * able to verify each of the assumptions.
+   * <p>
+   * Here is what happens at High level.
+   * <p>
+   * 1. We start the datanodeStateMachine in the INIT State.
+   * <p>
+   * 2. We invoke the INIT state task.
+   * <p>
+   * 3. That creates a set of RPC endpoints that are ready to connect to SCMs.
+   * <p>
+   * 4. We assert that we have moved to the running state for the
+   * DatanodeStateMachine.
+   * <p>
+   * 5. We get the task for the Running State - executing that running state
+   * makes the first network call of the state machine. The Endpoint is in
+   * the GETVERSION state and we invoke the task.
+   * <p>
+   * 6. We assert that this call was a success by checking that each of the
+   * endpoints now has the version response that it got from the SCM server it
+   * was talking to, and also that each of the mock servers serviced one RPC call.
+   * <p>
+   * 7. Since the Register is done now, subsequent calls to get the task will
+   * return HeartbeatTask, which sends heartbeats to SCM. We assert that we get
+   * the right task from the sub-system below.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testDatanodeStateContext() throws IOException,
+      InterruptedException, ExecutionException, TimeoutException {
+    // There is no mini cluster started in this test, so create an ID file
+    // so that the state machine can load a fake datanode ID.
+    File idPath = new File(
+        conf.get(ScmConfigKeys.OZONE_SCM_DATANODE_ID));
+    idPath.delete();
+    DatanodeDetails datanodeDetails = getNewDatanodeDetails();
+    datanodeDetails.setContainerPort(
+        OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT);
+    ContainerUtils.writeDatanodeDetailsTo(datanodeDetails, idPath);
+
+    try (DatanodeStateMachine stateMachine =
+             new DatanodeStateMachine(datanodeDetails, conf)) {
+      DatanodeStateMachine.DatanodeStates currentState =
+          stateMachine.getContext().getState();
+      Assert.assertEquals(DatanodeStateMachine.DatanodeStates.INIT,
+          currentState);
+
+      DatanodeState<DatanodeStateMachine.DatanodeStates> task =
+          stateMachine.getContext().getTask();
+      Assert.assertEquals(InitDatanodeState.class, task.getClass());
+
+      task.execute(executorService);
+      DatanodeStateMachine.DatanodeStates newState =
+          task.await(2, TimeUnit.SECONDS);
+
+      for (EndpointStateMachine endpoint :
+          stateMachine.getConnectionManager().getValues()) {
+        // We assert that each of the endpoints is in the GETVERSION state.
+        Assert.assertEquals(EndpointStateMachine.EndPointStates.GETVERSION,
+            endpoint.getState());
+      }
+
+      // The Datanode has moved into Running State, since endpoints are created.
+      // We move to running state when we are ready to issue RPC calls to SCMs.
+      Assert.assertEquals(DatanodeStateMachine.DatanodeStates.RUNNING,
+          newState);
+
+      // If we had called context.execute instead of calling into each state
+      // this would have happened automatically.
+      stateMachine.getContext().setState(newState);
+      task = stateMachine.getContext().getTask();
+      Assert.assertEquals(RunningDatanodeState.class, task.getClass());
+
+      // This execute will invoke getVersion calls against all SCM endpoints
+      // that we know of.
+
+      task.execute(executorService);
+      newState = task.await(10, TimeUnit.SECONDS);
+      // The datanode should still be in the RUNNING state.
+      Assert.assertEquals(DatanodeStateMachine.DatanodeStates.RUNNING,
+          newState);
+
+      for (EndpointStateMachine endpoint :
+          stateMachine.getConnectionManager().getValues()) {
+
+        // Since the earlier task.execute called into GetVersion, the
+        // endpoint state machine should move to the REGISTER state.
+        Assert.assertEquals(EndpointStateMachine.EndPointStates.REGISTER,
+            endpoint.getState());
+
+        // We assert that each of the endpoints has gotten a version from the
+        // SCM server.
+        Assert.assertNotNull(endpoint.getVersion());
+      }
+
+      // We can also assert that all mock servers have received only one RPC
+      // call at this point of time.
+      for (ScmTestMock mock : mockServers) {
+        Assert.assertEquals(1, mock.getRpcCount());
+      }
+
+      // This task is the Running task, but the running task executes sub-tasks
+      // based on the state of the endpoints, hence this next call will be a
+      // Register at the endpoint RPC level.
+      task = stateMachine.getContext().getTask();
+      task.execute(executorService);
+      newState = task.await(2, TimeUnit.SECONDS);
+
+      // The datanode should still be in the RUNNING state.
+      Assert.assertEquals(DatanodeStateMachine.DatanodeStates.RUNNING,
+          newState);
+
+      for (ScmTestMock mock : mockServers) {
+        Assert.assertEquals(2, mock.getRpcCount());
+      }
+
+      // This task is the Running task, but the running task executes sub-tasks
+      // based on the state of the endpoints, hence this next call will be a
+      // HeartbeatTask at the endpoint RPC level.
+      task = stateMachine.getContext().getTask();
+      task.execute(executorService);
+      newState = task.await(2, TimeUnit.SECONDS);
+
+      // The datanode should still be in the RUNNING state.
+      Assert.assertEquals(DatanodeStateMachine.DatanodeStates.RUNNING,
+          newState);
+
+
+      for (ScmTestMock mock : mockServers) {
+        Assert.assertEquals(1, mock.getHeartbeatCount());
+        // Assert that the heartbeat did indeed carry the report state that we
+        // set in the datanode.
+        Assert.assertEquals(mock.getReportState().getState().getNumber(),
+            StorageContainerDatanodeProtocolProtos.ReportState.states
+                .noContainerReports.getNumber());
+      }
+    }
+  }
+
+  /**
+   * Test state transition with a list of invalid scm configurations,
+   * and verify that the state transitions to SHUTDOWN each time.
+   */
+  @Test
+  public void testDatanodeStateMachineWithInvalidConfiguration()
+      throws Exception {
+    LinkedList<Map.Entry<String, String>> confList =
+        new LinkedList<Map.Entry<String, String>>();
+    confList.add(Maps.immutableEntry(ScmConfigKeys.OZONE_SCM_NAMES, ""));
+
+    // Invalid ozone.scm.names
+    /** Empty **/
+    confList.add(Maps.immutableEntry(
+        ScmConfigKeys.OZONE_SCM_NAMES, ""));
+    /** Invalid schema **/
+    confList.add(Maps.immutableEntry(
+        ScmConfigKeys.OZONE_SCM_NAMES, "x..y"));
+    /** Invalid port **/
+    confList.add(Maps.immutableEntry(
+        ScmConfigKeys.OZONE_SCM_NAMES, "scm:xyz"));
+    /** Port out of range **/
+    confList.add(Maps.immutableEntry(
+        ScmConfigKeys.OZONE_SCM_NAMES, "scm:123456"));
+    // Invalid ozone.scm.datanode.id
+    /** Empty **/
+    confList.add(Maps.immutableEntry(
+        ScmConfigKeys.OZONE_SCM_DATANODE_ID, ""));
+
+    confList.forEach((entry) -> {
+      Configuration perTestConf = new Configuration(conf);
+      perTestConf.setStrings(entry.getKey(), entry.getValue());
+      LOG.info("Test with {} = {}", entry.getKey(), entry.getValue());
+      try (DatanodeStateMachine stateMachine = new DatanodeStateMachine(
+          getNewDatanodeDetails(), perTestConf)) {
+        DatanodeStateMachine.DatanodeStates currentState =
+            stateMachine.getContext().getState();
+        Assert.assertEquals(DatanodeStateMachine.DatanodeStates.INIT,
+            currentState);
+        DatanodeState<DatanodeStateMachine.DatanodeStates> task =
+            stateMachine.getContext().getTask();
+        task.execute(executorService);
+        DatanodeStateMachine.DatanodeStates newState =
+            task.await(2, TimeUnit.SECONDS);
+        Assert.assertEquals(DatanodeStateMachine.DatanodeStates.SHUTDOWN,
+            newState);
+      } catch (Exception e) {
+        Assert.fail("Unexpected exception found");
+      }
+    });
+  }
+
+  private DatanodeDetails getNewDatanodeDetails() {
+    return DatanodeDetails.newBuilder()
+        .setUuid(UUID.randomUUID().toString())
+        .setHostName("localhost")
+        .setIpAddress("127.0.0.1")
+        .setContainerPort(0)
+        .setRatisPort(0)
+        .setOzoneRestPort(0)
+        .build();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java
new file mode 100644
index 0000000..86888aa
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/BlockDeletingServiceTestImpl.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.statemachine.background
+    .BlockDeletingService;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A test class implementation for {@link BlockDeletingService}.
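+ *
+ * A test would typically drive it manually (illustrative only):
+ *   svc.start();
+ *   GenericTestUtils.waitFor(svc::isStarted, 100, 3000);
+ *   svc.runDeletingTasks();   // releases the latch for one round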
+ */
+public class BlockDeletingServiceTestImpl
+    extends BlockDeletingService {
+
+  // the service timeout
+  private static final int SERVICE_TIMEOUT_IN_MILLISECONDS = 0;
+
+  // tests only
+  private CountDownLatch latch;
+  private Thread testingThread;
+  private AtomicInteger numOfProcessed = new AtomicInteger(0);
+
+  public BlockDeletingServiceTestImpl(ContainerManager containerManager,
+      int serviceInterval, Configuration conf) {
+    super(containerManager, serviceInterval,
+        SERVICE_TIMEOUT_IN_MILLISECONDS, conf);
+  }
+
+  @VisibleForTesting
+  public void runDeletingTasks() {
+    if (latch.getCount() > 0) {
+      this.latch.countDown();
+    } else {
+      throw new IllegalStateException("Count already reached zero");
+    }
+  }
+
+  @VisibleForTesting
+  public boolean isStarted() {
+    return latch != null && testingThread.isAlive();
+  }
+
+  public int getTimesOfProcessed() {
+    return numOfProcessed.get();
+  }
+
+  // Override the implementation to start a single on-call control thread.
+  @Override public void start() {
+    PeriodicalTask svc = new PeriodicalTask();
+    // In test mode, rely on a latch countdown from runDeletingTasks() to run tasks.
+    Runnable r = () -> {
+      while (true) {
+        latch = new CountDownLatch(1);
+        try {
+          latch.await();
+        } catch (InterruptedException e) {
+          break;
+        }
+        Future<?> future = this.getExecutorService().submit(svc);
+        try {
+          // for tests, we only wait for 3s for completion
+          future.get(3, TimeUnit.SECONDS);
+          numOfProcessed.incrementAndGet();
+        } catch (Exception e) {
+          return;
+        }
+      }
+    };
+
+    testingThread = new ThreadFactoryBuilder()
+        .setDaemon(true)
+        .build()
+        .newThread(r);
+    testingThread.start();
+  }
+
+  @Override
+  public void shutdown() {
+    testingThread.interrupt();
+    super.shutdown();
+  }
+}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java
new file mode 100644
index 0000000..4e8a90b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+// Helper classes for ozone and container tests.
\ No newline at end of file
diff --git a/hadoop-hdds/container-service/src/test/resources/log4j.properties b/hadoop-hdds/container-service/src/test/resources/log4j.properties
new file mode 100644
index 0000000..bb5cbe5
--- /dev/null
+++ b/hadoop-hdds/container-service/src/test/resources/log4j.properties
@@ -0,0 +1,23 @@
+#
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=INFO,stdout
+log4j.threshold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
diff --git a/hadoop-hdds/framework/README.md b/hadoop-hdds/framework/README.md
new file mode 100644
index 0000000..0eda3f5
--- /dev/null
+++ b/hadoop-hdds/framework/README.md
@@ -0,0 +1,24 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+# Server framework for HDDS/Ozone
+
+This project contains generic utilities and resources for all the HDDS/Ozone
+server-side components.
+
+The project is shared between the server/service projects but not with the
+client packages.
\ No newline at end of file
diff --git a/hadoop-hdds/framework/pom.xml b/hadoop-hdds/framework/pom.xml
new file mode 100644
index 0000000..c8d0797
--- /dev/null
+++ b/hadoop-hdds/framework/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-hdds-server-framework</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache HDDS server framework</description>
+  <name>Apache HDDS Server Common</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy web resources</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <copy toDir="${project.build.directory}/classes/webapps/static">
+                  <fileset
+                          dir="${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static">
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java
new file mode 100644
index 0000000..da5d8da
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/BaseHttpServer.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.server;
+
+import com.google.common.base.Optional;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.HddsConfServlet;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.http.HttpServlet;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
+import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;
+
+/**
+ * Base class for the HTTP servers of the Ozone-related components.
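+ *
+ * A minimal sketch (hypothetical names) of a concrete server: a subclass
+ * only supplies its own configuration keys and defaults, for example:
+ * <pre>
+ *   public class FooHttpServer extends BaseHttpServer {
+ *     public FooHttpServer(Configuration conf) throws IOException {
+ *       super(conf, "foo");
+ *     }
+ *     protected String getHttpAddressKey() { return "foo.http-address"; }
+ *     // ... the remaining abstract getters return the matching foo.* keys,
+ *     // bind-host defaults, keytab file and SPNEGO principal settings.
+ *   }
+ * </pre>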
+ */
+public abstract class BaseHttpServer {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BaseHttpServer.class);
+
+  private HttpServer2 httpServer;
+  private final Configuration conf;
+
+  private InetSocketAddress httpAddress;
+  private InetSocketAddress httpsAddress;
+
+  private HttpConfig.Policy policy;
+
+  private String name;
+
+  public BaseHttpServer(Configuration conf, String name) throws IOException {
+    this.name = name;
+    this.conf = conf;
+    if (isEnabled()) {
+      policy = DFSUtil.getHttpPolicy(conf);
+      if (policy.isHttpEnabled()) {
+        this.httpAddress = getHttpBindAddress();
+      }
+      if (policy.isHttpsEnabled()) {
+        this.httpsAddress = getHttpsBindAddress();
+      }
+      HttpServer2.Builder builder = null;
+      builder = DFSUtil.httpServerTemplateForNNAndJN(conf, this.httpAddress,
+          this.httpsAddress, name, getSpnegoPrincipal(), getKeytabFile());
+
+      final boolean xFrameEnabled = conf.getBoolean(
+          DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
+          DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
+
+      final String xFrameOptionValue = conf.getTrimmed(
+          DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
+          DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
+
+      builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
+
+      httpServer = builder.build();
+      httpServer.addServlet("conf", "/conf", HddsConfServlet.class);
+
+    }
+
+  }
+
+  /**
+   * Add a servlet to BaseHttpServer.
+   *
+   * @param servletName The name of the servlet
+   * @param pathSpec    The path spec for the servlet
+   * @param clazz       The servlet class
+   */
+  protected void addServlet(String servletName, String pathSpec,
+      Class<? extends HttpServlet> clazz) {
+    httpServer.addServlet(servletName, pathSpec, clazz);
+  }
+
+  /**
+   * Returns the WebAppContext associated with this HttpServer.
+   *
+   * @return WebAppContext
+   */
+  protected WebAppContext getWebAppContext() {
+    return httpServer.getWebAppContext();
+  }
+
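+  /**
+   * Resolves the address to bind to: the bind-host key wins over the host
+   * part of the address key, which wins over the given default; the port
+   * always comes from the address key (or the default port).
+   */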
+  protected InetSocketAddress getBindAddress(String bindHostKey,
+      String addressKey, String bindHostDefault, int bindPortdefault) {
+    final Optional<String> bindHost =
+        getHostNameFromConfigKeys(conf, bindHostKey);
+
+    final Optional<Integer> addressPort =
+        getPortNumberFromConfigKeys(conf, addressKey);
+
+    final Optional<String> addresHost =
+        getHostNameFromConfigKeys(conf, addressKey);
+
+    String hostName = bindHost.or(addresHost).or(bindHostDefault);
+
+    return NetUtils.createSocketAddr(
+        hostName + ":" + addressPort.or(bindPortdefault));
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the HTTPS web interface.
+   *
+   * @return Target InetSocketAddress for the Ozone HTTPS endpoint.
+   */
+  public InetSocketAddress getHttpsBindAddress() {
+    return getBindAddress(getHttpsBindHostKey(), getHttpsAddressKey(),
+        getBindHostDefault(), getHttpsBindPortDefault());
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to the HTTP web interface.
+   *
+   * @return Target InetSocketAddress for the Ozone HTTP endpoint.
+   */
+  public InetSocketAddress getHttpBindAddress() {
+    return getBindAddress(getHttpBindHostKey(), getHttpAddressKey(),
+        getBindHostDefault(), getHttpBindPortDefault());
+
+  }
+
+  public void start() throws IOException {
+    if (httpServer != null && isEnabled()) {
+      httpServer.start();
+      updateConnectorAddress();
+    }
+
+  }
+
+  private boolean isEnabled() {
+    return conf.getBoolean(getEnabledKey(), true);
+  }
+
+  public void stop() throws Exception {
+    if (httpServer != null) {
+      httpServer.stop();
+    }
+  }
+
+  /**
+   * Update the configured listen address based on the real port
+   * (e.g. replace :0 with the real port).
+   */
+  public void updateConnectorAddress() {
+    int connIdx = 0;
+    if (policy.isHttpEnabled()) {
+      httpAddress = httpServer.getConnectorAddress(connIdx++);
+      String realAddress = NetUtils.getHostPortString(httpAddress);
+      conf.set(getHttpAddressKey(), realAddress);
+      LOG.info(
+          String.format("HTTP server of %s is listening at http://%s",
+              name.toUpperCase(), realAddress));
+    }
+
+    if (policy.isHttpsEnabled()) {
+      httpsAddress = httpServer.getConnectorAddress(connIdx);
+      String realAddress = NetUtils.getHostPortString(httpsAddress);
+      conf.set(getHttpsAddressKey(), realAddress);
+      LOG.info(
+          String.format("HTTP server of %s is listening at https://%s",
+              name.toUpperCase(), realAddress));
+    }
+  }
+
+  public InetSocketAddress getHttpAddress() {
+    return httpAddress;
+  }
+
+  public InetSocketAddress getHttpsAddress() {
+    return httpsAddress;
+  }
+
+  protected abstract String getHttpAddressKey();
+
+  protected abstract String getHttpsAddressKey();
+
+  protected abstract String getHttpBindHostKey();
+
+  protected abstract String getHttpsBindHostKey();
+
+  protected abstract String getBindHostDefault();
+
+  protected abstract int getHttpBindPortDefault();
+
+  protected abstract int getHttpsBindPortDefault();
+
+  protected abstract String getKeytabFile();
+
+  protected abstract String getSpnegoPrincipal();
+
+  protected abstract String getEnabledKey();
+
+}
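
For readers skimming the patch: BaseHttpServer is meant to be subclassed by each HDDS/Ozone daemon, which only supplies its own configuration keys and defaults. The base class then resolves the bind address (preferring the bind-host key, falling back to the host from the address key, and finally to the default), honours the enabled key, and after start() rewrites the configured address with the port the embedded web server actually bound to, so a configured :0 ends up as the real ephemeral port. A minimal sketch of such a subclass is below; the key names, default ports, and the two-argument (conf, name) super constructor are assumptions for illustration, not something this patch defines.

    // Illustrative sketch only: key names, default ports and the (conf, name)
    // super constructor are assumed; the class is assumed to live alongside
    // BaseHttpServer in the same package.
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    public class ExampleHttpServer extends BaseHttpServer {

      public ExampleHttpServer(Configuration conf) throws IOException {
        super(conf, "example");
      }

      @Override protected String getHttpAddressKey()    { return "hdds.example.http-address"; }
      @Override protected String getHttpsAddressKey()   { return "hdds.example.https-address"; }
      @Override protected String getHttpBindHostKey()   { return "hdds.example.http-bind-host"; }
      @Override protected String getHttpsBindHostKey()  { return "hdds.example.https-bind-host"; }
      @Override protected String getBindHostDefault()   { return "0.0.0.0"; }
      @Override protected int getHttpBindPortDefault()  { return 9880; }
      @Override protected int getHttpsBindPortDefault() { return 9881; }
      @Override protected String getKeytabFile()        { return "hdds.example.keytab.file"; }
      @Override protected String getSpnegoPrincipal()   { return "hdds.example.kerberos.principal"; }
      @Override protected String getEnabledKey()        { return "hdds.example.http.enabled"; }
    }
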
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java
new file mode 100644
index 0000000..a0e78dc
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServerUtils.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.server;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+
+/**
+ * Generic utilities for all HDDS/Ozone servers.
+ */
+public final class ServerUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      ServerUtils.class);
+
+  private ServerUtils() {
+  }
+
+  /**
+   * Checks that a given value is within a range derived from a base value.
+   *
+   * For example, sanitizeUserArgs(17, 3, 5, 10)
+   * ensures that 17 is greater than or equal to 3 * 5 and less than or
+   * equal to 3 * 10.
+   *
+   * @param valueToCheck  - value to check
+   * @param baseValue     - the base value the range is derived from
+   * @param minFactor     - range minimum factor; e.g. a minFactor of 2
+   *                        ensures that valueToCheck is at least twice the
+   *                        baseValue
+   * @param maxFactor     - range maximum factor
+   * @return valueToCheck if it falls within the range
+   */
+  public static long sanitizeUserArgs(long valueToCheck, long baseValue,
+      long minFactor, long maxFactor)
+      throws IllegalArgumentException {
+    if ((valueToCheck >= (baseValue * minFactor)) &&
+        (valueToCheck <= (baseValue * maxFactor))) {
+      return valueToCheck;
+    }
+    String errMsg = String.format("%d is not within the range [%d, %d]",
+        valueToCheck, baseValue * minFactor, baseValue * maxFactor);
+    throw new IllegalArgumentException(errMsg);
+  }
+
+
+  /**
+   * After starting an RPC server, updates configuration with the actual
+   * listening address of that server. The listening address may be different
+   * from the configured address if, for example, the configured address uses
+   * port 0 to request use of an ephemeral port.
+   *
+   * @param conf configuration to update
+   * @param rpcAddressKey configuration key for RPC server address
+   * @param addr configured address
+   * @param rpcServer started RPC server.
+   */
+  public static InetSocketAddress updateRPCListenAddress(
+      OzoneConfiguration conf, String rpcAddressKey,
+      InetSocketAddress addr, RPC.Server rpcServer) {
+    return updateListenAddress(conf, rpcAddressKey, addr,
+        rpcServer.getListenerAddress());
+  }
+
+
+  /**
+   * After starting a server, updates configuration with the actual
+   * listening address of that server. The listening address may be different
+   * from the configured address if, for example, the configured address uses
+   * port 0 to request use of an ephemeral port.
+   *
+   * @param conf       configuration to update
+   * @param addressKey configuration key for the server address
+   * @param addr       configured address
+   * @param listenAddr the real listening address.
+   */
+  public static InetSocketAddress updateListenAddress(OzoneConfiguration conf,
+      String addressKey, InetSocketAddress addr, InetSocketAddress listenAddr) {
+    InetSocketAddress updatedAddr = new InetSocketAddress(addr.getHostString(),
+        listenAddr.getPort());
+    conf.set(addressKey,
+        addr.getHostString() + ":" + listenAddr.getPort());
+    return updatedAddr;
+  }
+
+
+  /**
+   * Releases an HTTP connection if the request is not null.
+   *
+   * @param request the HTTP request whose connection should be released
+   */
+  public static void releaseConnection(HttpRequestBase request) {
+    if (request != null) {
+      request.releaseConnection();
+    }
+  }
+
+
+  /**
+   * Checks for the Ozone metadata directory path and creates it if it does
+   * not exist.
+   *
+   * @param conf - Configuration
+   *
+   * @return File object pointing to the metadata directory
+   */
+  public static File getOzoneMetaDirPath(Configuration conf) {
+    String metaDirPath = conf.getTrimmed(OzoneConfigKeys
+        .OZONE_METADATA_DIRS);
+    Preconditions.checkNotNull(metaDirPath);
+    File dirPath = new File(metaDirPath);
+    if (!dirPath.exists() && !dirPath.mkdirs()) {
+      throw new IllegalArgumentException("Unable to create paths. Path: " +
+          dirPath);
+    }
+    return dirPath;
+  }
+
+}
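
A quick sketch of how two of the helpers above compose; the configuration key is hypothetical and the port values are arbitrary, chosen only to show the range check and the address rewrite.

    // Illustrative sketch only; "hdds.example.rpc-address" is a made-up key.
    import java.net.InetSocketAddress;

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.hdds.server.ServerUtils;

    public final class ServerUtilsExample {
      public static void main(String[] args) {
        // 17 lies between 3 * 5 = 15 and 3 * 10 = 30, so it is returned as-is;
        // a value outside that range would raise IllegalArgumentException.
        long checked = ServerUtils.sanitizeUserArgs(17, 3, 5, 10);

        // Pretend a server configured with port 0 actually bound to 53841.
        OzoneConfiguration conf = new OzoneConfiguration();
        InetSocketAddress configured = new InetSocketAddress("0.0.0.0", 0);
        InetSocketAddress actual = new InetSocketAddress("0.0.0.0", 53841);
        ServerUtils.updateListenAddress(
            conf, "hdds.example.rpc-address", configured, actual);

        System.out.println(checked);                              // 17
        System.out.println(conf.get("hdds.example.rpc-address")); // 0.0.0.0:53841
      }
    }
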
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfo.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfo.java
new file mode 100644
index 0000000..bcd75f3
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfo.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.server;
+
+/**
+ * Common runtime information for any service component.
+ *
+ * Note: it is intentional not to use MXBean or MBean as a suffix of the name.
+ *
+ * Most services extend the ServiceRuntimeInfoImpl class and also implement a
+ * specific MXBean interface which extends this interface.
+ *
+ * Inheriting this interface along multiple paths could confuse the JMX
+ * system, and some JMX properties could disappear.
+ *
+ * The solution is to always extend this interface and follow the JMX naming
+ * convention in the new interface.
+ */
+public interface ServiceRuntimeInfo {
+
+  /**
+   * Gets the version of Hadoop.
+   *
+   * @return the version
+   */
+  String getVersion();
+
+  /**
+   * Gets the version of the software running on this service.
+   *
+   * @return a string representing the version
+   */
+  String getSoftwareVersion();
+
+  /**
+   * Get the compilation information which contains date, user and branch.
+   *
+   * @return the compilation information, as a JSON string.
+   */
+  String getCompileInfo();
+
+  /**
+   * Gets the service start time in milliseconds.
+   *
+   * @return the start time in msec
+   */
+  long getStartedTimeInMillis();
+
+}
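
In practice, the convention described in the javadoc above looks roughly like the following; the interface name and the extra attribute are hypothetical.

    // Hypothetical example: the service-specific management interface extends
    // ServiceRuntimeInfo and carries the MXBean suffix itself, so the common
    // attributes and the service's own attributes show up on a single bean.
    public interface ExampleServiceInfoMXBean extends ServiceRuntimeInfo {

      /** Number of currently connected clients (illustrative attribute). */
      int getClientCount();
    }
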
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfoImpl.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfoImpl.java
new file mode 100644
index 0000000..36d6b64
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfoImpl.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.server;
+
+import org.apache.hadoop.util.VersionInfo;
+
+/**
+ * Helper base class to report the standard version and runtime information.
+ *
+ */
+public class ServiceRuntimeInfoImpl implements ServiceRuntimeInfo {
+
+  private long startedTimeInMillis;
+
+  @Override
+  public String getVersion() {
+    return VersionInfo.getVersion() + ", r" + VersionInfo.getRevision();
+  }
+
+  @Override
+  public String getSoftwareVersion() {
+    return VersionInfo.getVersion();
+  }
+
+  @Override
+  public String getCompileInfo() {
+    return VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from "
+        + VersionInfo.getBranch();
+  }
+
+  @Override
+  public long getStartedTimeInMillis() {
+    return startedTimeInMillis;
+  }
+
+  public void setStartTime() {
+    startedTimeInMillis = System.currentTimeMillis();
+  }
+
+}
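
A rough sketch of how a service is expected to use this helper: extend it, call setStartTime() on startup, and publish the bean over JMX (here via Hadoop's MBeans utility). ExampleServiceInfoMXBean is the hypothetical interface sketched after ServiceRuntimeInfo above, and the JMX names are placeholders.

    // Illustrative sketch only; class names and JMX names are placeholders.
    import javax.management.ObjectName;

    import org.apache.hadoop.metrics2.util.MBeans;

    public class ExampleService extends ServiceRuntimeInfoImpl
        implements ExampleServiceInfoMXBean {

      private ObjectName jmxBean;

      public void start() {
        // Records the start time reported through getStartedTimeInMillis().
        setStartTime();
        jmxBean = MBeans.register("ExampleService", "ExampleServiceInfo", this);
      }

      public void stop() {
        if (jmxBean != null) {
          MBeans.unregister(jmxBean);
          jmxBean = null;
        }
      }

      @Override
      public int getClientCount() {
        return 0; // placeholder for a real metric
      }
    }
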
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/package-info.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/package-info.java
new file mode 100644
index 0000000..35ad5e7
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.server;
+
+/**
+ * Common server side utilities for all the hdds/ozone server components.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/datanode/dn.js b/hadoop-hdds/framework/src/main/resources/webapps/datanode/dn.js
new file mode 100644
index 0000000..3b67167
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/datanode/dn.js
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+  "use strict";
+
+  var data = {ozone: {enabled: false}};
+
+  dust.loadSource(dust.compile($('#tmpl-dn').html(), 'dn'));
+
+  function loadDatanodeInfo() {
+    $.get('/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo', function(resp) {
+      data.dn = workaround(resp.beans[0]);
+      data.dn.HostName = resp.beans[0]['DatanodeHostname'];
+      render();
+    }).fail(show_err_msg);
+  }
+
+  function loadOzoneScmInfo() {
+    $.get('/jmx?qry=Hadoop:service=OzoneDataNode,name=SCMConnectionManager', function (resp) {
+      if (resp.beans.length > 0) {
+        data.ozone.SCMServers = resp.beans[0].SCMServers;
+        data.ozone.enabled = true;
+        render();
+      }
+    }).fail(show_err_msg);
+  }
+
+  function loadOzoneStorageInfo() {
+    $.get('/jmx?qry=Hadoop:service=OzoneDataNode,name=ContainerLocationManager', function (resp) {
+      if (resp.beans.length > 0) {
+        data.ozone.LocationReport = resp.beans[0].LocationReport;
+        data.ozone.enabled = true;
+        render();
+      }
+    }).fail(show_err_msg);
+  }
+
+  function workaround(dn) {
+    function node_map_to_array(nodes) {
+      var res = [];
+      for (var n in nodes) {
+        var p = nodes[n];
+        p.name = n;
+        res.push(p);
+      }
+      return res;
+    }
+
+    dn.VolumeInfo = node_map_to_array(JSON.parse(dn.VolumeInfo));
+    dn.BPServiceActorInfo = JSON.parse(dn.BPServiceActorInfo);
+
+    return dn;
+  }
+
+  function render() {
+    var base = dust.makeBase({
+      'helper_relative_time' : function (chunk, ctx, bodies, params) {
+        var value = dust.helpers.tap(params.value, chunk, ctx);
+        return chunk.write(moment().subtract(Number(value), 'seconds').fromNow(true));
+      }
+    });
+    dust.render('dn', base.push(data), function(err, out) {
+      $('#tab-overview').html(out);
+      $('#tab-overview').addClass('active');
+    });
+  }
+
+  function show_err_msg() {
+    $('#alert-panel-body').html("Failed to load datanode information");
+    $('#alert-panel').show();
+  }
+
+  loadDatanodeInfo();
+  loadOzoneScmInfo();
+  loadOzoneStorageInfo();
+
+})();
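
dn.js builds the whole page from the /jmx servlet, which answers with a JSON object whose beans array contains the requested MBean attributes. The same endpoint can be read by any HTTP client; a rough Java sketch follows, with localhost:9864 standing in for whatever HTTP address the datanode web UI is actually bound to.

    // Illustrative sketch only; the host and port are placeholders.
    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public final class JmxFetchExample {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:9864"
            + "/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
          StringBuilder json = new StringBuilder();
          String line;
          while ((line = in.readLine()) != null) {
            json.append(line).append('\n');
          }
          // The response looks like {"beans":[{...}]}; dn.js reads beans[0].
          System.out.println(json);
        } finally {
          conn.disconnect();
        }
      }
    }
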
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/angular-1.6.4.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-1.6.4.min.js
new file mode 100644
index 0000000..c4bf158
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-1.6.4.min.js
@@ -0,0 +1,332 @@
+/*
+ AngularJS v1.6.4
+ (c) 2010-2017 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(x){'use strict';function L(a,b){b=b||Error;return function(){var d=arguments[0],c;c="["+(a?a+":":"")+d+"] http://errors.angularjs.org/1.6.4/"+(a?a+"/":"")+d;for(d=1;d<arguments.length;d++){c=c+(1==d?"?":"&")+"p"+(d-1)+"=";var e=encodeURIComponent,f;f=arguments[d];f="function"==typeof f?f.toString().replace(/ \{[\s\S]*$/,""):"undefined"==typeof f?"undefined":"string"!=typeof f?JSON.stringify(f):f;c+=e(f)}return new b(c)}}function me(a){if(C(a))u(a.objectMaxDepth)&&(Ic.objectMaxDepth=Sb(a.objectMaxDepth)?
+a.objectMaxDepth:NaN);else return Ic}function Sb(a){return ba(a)&&0<a}function qa(a){if(null==a||Wa(a))return!1;if(H(a)||F(a)||B&&a instanceof B)return!0;var b="length"in Object(a)&&a.length;return ba(b)&&(0<=b&&(b-1 in a||a instanceof Array)||"function"===typeof a.item)}function q(a,b,d){var c,e;if(a)if(D(a))for(c in a)"prototype"!==c&&"length"!==c&&"name"!==c&&a.hasOwnProperty(c)&&b.call(d,a[c],c,a);else if(H(a)||qa(a)){var f="object"!==typeof a;c=0;for(e=a.length;c<e;c++)(f||c in a)&&b.call(d,
+a[c],c,a)}else if(a.forEach&&a.forEach!==q)a.forEach(b,d,a);else if(Jc(a))for(c in a)b.call(d,a[c],c,a);else if("function"===typeof a.hasOwnProperty)for(c in a)a.hasOwnProperty(c)&&b.call(d,a[c],c,a);else for(c in a)ua.call(a,c)&&b.call(d,a[c],c,a);return a}function Kc(a,b,d){for(var c=Object.keys(a).sort(),e=0;e<c.length;e++)b.call(d,a[c[e]],c[e]);return c}function Lc(a){return function(b,d){a(d,b)}}function ne(){return++qb}function Tb(a,b,d){for(var c=a.$$hashKey,e=0,f=b.length;e<f;++e){var g=b[e];
+if(C(g)||D(g))for(var h=Object.keys(g),k=0,l=h.length;k<l;k++){var m=h[k],n=g[m];d&&C(n)?ga(n)?a[m]=new Date(n.valueOf()):Xa(n)?a[m]=new RegExp(n):n.nodeName?a[m]=n.cloneNode(!0):Ub(n)?a[m]=n.clone():(C(a[m])||(a[m]=H(n)?[]:{}),Tb(a[m],[n],!0)):a[m]=n}}c?a.$$hashKey=c:delete a.$$hashKey;return a}function S(a){return Tb(a,va.call(arguments,1),!1)}function oe(a){return Tb(a,va.call(arguments,1),!0)}function Z(a){return parseInt(a,10)}function Vb(a,b){return S(Object.create(a),b)}function z(){}function Ya(a){return a}
+function la(a){return function(){return a}}function Wb(a){return D(a.toString)&&a.toString!==ma}function w(a){return"undefined"===typeof a}function u(a){return"undefined"!==typeof a}function C(a){return null!==a&&"object"===typeof a}function Jc(a){return null!==a&&"object"===typeof a&&!Mc(a)}function F(a){return"string"===typeof a}function ba(a){return"number"===typeof a}function ga(a){return"[object Date]"===ma.call(a)}function D(a){return"function"===typeof a}function Xa(a){return"[object RegExp]"===
+ma.call(a)}function Wa(a){return a&&a.window===a}function Za(a){return a&&a.$evalAsync&&a.$watch}function Ha(a){return"boolean"===typeof a}function pe(a){return a&&ba(a.length)&&qe.test(ma.call(a))}function Ub(a){return!(!a||!(a.nodeName||a.prop&&a.attr&&a.find))}function re(a){var b={};a=a.split(",");var d;for(d=0;d<a.length;d++)b[a[d]]=!0;return b}function wa(a){return Q(a.nodeName||a[0]&&a[0].nodeName)}function $a(a,b){var d=a.indexOf(b);0<=d&&a.splice(d,1);return d}function ra(a,b,d){function c(a,
+b,c){c--;if(0>c)return"...";var d=b.$$hashKey,f;if(H(a)){f=0;for(var g=a.length;f<g;f++)b.push(e(a[f],c))}else if(Jc(a))for(f in a)b[f]=e(a[f],c);else if(a&&"function"===typeof a.hasOwnProperty)for(f in a)a.hasOwnProperty(f)&&(b[f]=e(a[f],c));else for(f in a)ua.call(a,f)&&(b[f]=e(a[f],c));d?b.$$hashKey=d:delete b.$$hashKey;return b}function e(a,b){if(!C(a))return a;var d=g.indexOf(a);if(-1!==d)return h[d];if(Wa(a)||Za(a))throw Fa("cpws");var d=!1,e=f(a);void 0===e&&(e=H(a)?[]:Object.create(Mc(a)),
+d=!0);g.push(a);h.push(e);return d?c(a,e,b):e}function f(a){switch(ma.call(a)){case "[object Int8Array]":case "[object Int16Array]":case "[object Int32Array]":case "[object Float32Array]":case "[object Float64Array]":case "[object Uint8Array]":case "[object Uint8ClampedArray]":case "[object Uint16Array]":case "[object Uint32Array]":return new a.constructor(e(a.buffer),a.byteOffset,a.length);case "[object ArrayBuffer]":if(!a.slice){var b=new ArrayBuffer(a.byteLength);(new Uint8Array(b)).set(new Uint8Array(a));
+return b}return a.slice(0);case "[object Boolean]":case "[object Number]":case "[object String]":case "[object Date]":return new a.constructor(a.valueOf());case "[object RegExp]":return b=new RegExp(a.source,a.toString().match(/[^/]*$/)[0]),b.lastIndex=a.lastIndex,b;case "[object Blob]":return new a.constructor([a],{type:a.type})}if(D(a.cloneNode))return a.cloneNode(!0)}var g=[],h=[];d=Sb(d)?d:NaN;if(b){if(pe(b)||"[object ArrayBuffer]"===ma.call(b))throw Fa("cpta");if(a===b)throw Fa("cpi");H(b)?b.length=
+0:q(b,function(a,c){"$$hashKey"!==c&&delete b[c]});g.push(a);h.push(b);return c(a,b,d)}return e(a,d)}function Xb(a,b){return a===b||a!==a&&b!==b}function sa(a,b){if(a===b)return!0;if(null===a||null===b)return!1;if(a!==a&&b!==b)return!0;var d=typeof a,c;if(d===typeof b&&"object"===d)if(H(a)){if(!H(b))return!1;if((d=a.length)===b.length){for(c=0;c<d;c++)if(!sa(a[c],b[c]))return!1;return!0}}else{if(ga(a))return ga(b)?Xb(a.getTime(),b.getTime()):!1;if(Xa(a))return Xa(b)?a.toString()===b.toString():!1;
+if(Za(a)||Za(b)||Wa(a)||Wa(b)||H(b)||ga(b)||Xa(b))return!1;d=V();for(c in a)if("$"!==c.charAt(0)&&!D(a[c])){if(!sa(a[c],b[c]))return!1;d[c]=!0}for(c in b)if(!(c in d)&&"$"!==c.charAt(0)&&u(b[c])&&!D(b[c]))return!1;return!0}return!1}function ab(a,b,d){return a.concat(va.call(b,d))}function bb(a,b){var d=2<arguments.length?va.call(arguments,2):[];return!D(b)||b instanceof RegExp?b:d.length?function(){return arguments.length?b.apply(a,ab(d,arguments,0)):b.apply(a,d)}:function(){return arguments.length?
+b.apply(a,arguments):b.call(a)}}function Nc(a,b){var d=b;"string"===typeof a&&"$"===a.charAt(0)&&"$"===a.charAt(1)?d=void 0:Wa(b)?d="$WINDOW":b&&x.document===b?d="$DOCUMENT":Za(b)&&(d="$SCOPE");return d}function cb(a,b){if(!w(a))return ba(b)||(b=b?2:null),JSON.stringify(a,Nc,b)}function Oc(a){return F(a)?JSON.parse(a):a}function Pc(a,b){a=a.replace(se,"");var d=Date.parse("Jan 01, 1970 00:00:00 "+a)/6E4;return da(d)?b:d}function Yb(a,b,d){d=d?-1:1;var c=a.getTimezoneOffset();b=Pc(b,c);d*=b-c;a=new Date(a.getTime());
+a.setMinutes(a.getMinutes()+d);return a}function xa(a){a=B(a).clone();try{a.empty()}catch(b){}var d=B("<div>").append(a).html();try{return a[0].nodeType===Ia?Q(d):d.match(/^(<[^>]+>)/)[1].replace(/^<([\w-]+)/,function(a,b){return"<"+Q(b)})}catch(c){return Q(d)}}function Qc(a){try{return decodeURIComponent(a)}catch(b){}}function Rc(a){var b={};q((a||"").split("&"),function(a){var c,e,f;a&&(e=a=a.replace(/\+/g,"%20"),c=a.indexOf("="),-1!==c&&(e=a.substring(0,c),f=a.substring(c+1)),e=Qc(e),u(e)&&(f=
+u(f)?Qc(f):!0,ua.call(b,e)?H(b[e])?b[e].push(f):b[e]=[b[e],f]:b[e]=f))});return b}function Zb(a){var b=[];q(a,function(a,c){H(a)?q(a,function(a){b.push($(c,!0)+(!0===a?"":"="+$(a,!0)))}):b.push($(c,!0)+(!0===a?"":"="+$(a,!0)))});return b.length?b.join("&"):""}function db(a){return $(a,!0).replace(/%26/gi,"&").replace(/%3D/gi,"=").replace(/%2B/gi,"+")}function $(a,b){return encodeURIComponent(a).replace(/%40/gi,"@").replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%3B/gi,";").replace(/%20/g,
+b?"%20":"+")}function te(a,b){var d,c,e=Ja.length;for(c=0;c<e;++c)if(d=Ja[c]+b,F(d=a.getAttribute(d)))return d;return null}function ue(a,b){var d,c,e={};q(Ja,function(b){b+="app";!d&&a.hasAttribute&&a.hasAttribute(b)&&(d=a,c=a.getAttribute(b))});q(Ja,function(b){b+="app";var e;!d&&(e=a.querySelector("["+b.replace(":","\\:")+"]"))&&(d=e,c=e.getAttribute(b))});d&&(ve?(e.strictDi=null!==te(d,"strict-di"),b(d,c?[c]:[],e)):x.console.error("Angular: disabling automatic bootstrap. <script> protocol indicates an extension, document.location.href does not match."))}
+function Sc(a,b,d){C(d)||(d={});d=S({strictDi:!1},d);var c=function(){a=B(a);if(a.injector()){var c=a[0]===x.document?"document":xa(a);throw Fa("btstrpd",c.replace(/</,"&lt;").replace(/>/,"&gt;"));}b=b||[];b.unshift(["$provide",function(b){b.value("$rootElement",a)}]);d.debugInfoEnabled&&b.push(["$compileProvider",function(a){a.debugInfoEnabled(!0)}]);b.unshift("ng");c=eb(b,d.strictDi);c.invoke(["$rootScope","$rootElement","$compile","$injector",function(a,b,c,d){a.$apply(function(){b.data("$injector",
+d);c(b)(a)})}]);return c},e=/^NG_ENABLE_DEBUG_INFO!/,f=/^NG_DEFER_BOOTSTRAP!/;x&&e.test(x.name)&&(d.debugInfoEnabled=!0,x.name=x.name.replace(e,""));if(x&&!f.test(x.name))return c();x.name=x.name.replace(f,"");ea.resumeBootstrap=function(a){q(a,function(a){b.push(a)});return c()};D(ea.resumeDeferredBootstrap)&&ea.resumeDeferredBootstrap()}function we(){x.name="NG_ENABLE_DEBUG_INFO!"+x.name;x.location.reload()}function xe(a){a=ea.element(a).injector();if(!a)throw Fa("test");return a.get("$$testability")}
+function Tc(a,b){b=b||"_";return a.replace(ye,function(a,c){return(c?b:"")+a.toLowerCase()})}function ze(){var a;if(!Uc){var b=rb();(na=w(b)?x.jQuery:b?x[b]:void 0)&&na.fn.on?(B=na,S(na.fn,{scope:Na.scope,isolateScope:Na.isolateScope,controller:Na.controller,injector:Na.injector,inheritedData:Na.inheritedData}),a=na.cleanData,na.cleanData=function(b){for(var c,e=0,f;null!=(f=b[e]);e++)(c=na._data(f,"events"))&&c.$destroy&&na(f).triggerHandler("$destroy");a(b)}):B=W;ea.element=B;Uc=!0}}function fb(a,
+b,d){if(!a)throw Fa("areq",b||"?",d||"required");return a}function sb(a,b,d){d&&H(a)&&(a=a[a.length-1]);fb(D(a),b,"not a function, got "+(a&&"object"===typeof a?a.constructor.name||"Object":typeof a));return a}function Ka(a,b){if("hasOwnProperty"===a)throw Fa("badname",b);}function Vc(a,b,d){if(!b)return a;b=b.split(".");for(var c,e=a,f=b.length,g=0;g<f;g++)c=b[g],a&&(a=(e=a)[c]);return!d&&D(a)?bb(e,a):a}function tb(a){for(var b=a[0],d=a[a.length-1],c,e=1;b!==d&&(b=b.nextSibling);e++)if(c||a[e]!==
+b)c||(c=B(va.call(a,0,e))),c.push(b);return c||a}function V(){return Object.create(null)}function $b(a){if(null==a)return"";switch(typeof a){case "string":break;case "number":a=""+a;break;default:a=!Wb(a)||H(a)||ga(a)?cb(a):a.toString()}return a}function Ae(a){function b(a,b,c){return a[b]||(a[b]=c())}var d=L("$injector"),c=L("ng");a=b(a,"angular",Object);a.$$minErr=a.$$minErr||L;return b(a,"module",function(){var a={};return function(f,g,h){var k={};if("hasOwnProperty"===f)throw c("badname","module");
+g&&a.hasOwnProperty(f)&&(a[f]=null);return b(a,f,function(){function a(b,c,d,f){f||(f=e);return function(){f[d||"push"]([b,c,arguments]);return v}}function b(a,c,d){d||(d=e);return function(b,e){e&&D(e)&&(e.$$moduleName=f);d.push([a,c,arguments]);return v}}if(!g)throw d("nomod",f);var e=[],p=[],r=[],J=a("$injector","invoke","push",p),v={_invokeQueue:e,_configBlocks:p,_runBlocks:r,info:function(a){if(u(a)){if(!C(a))throw c("aobj","value");k=a;return this}return k},requires:g,name:f,provider:b("$provide",
+"provider"),factory:b("$provide","factory"),service:b("$provide","service"),value:a("$provide","value"),constant:a("$provide","constant","unshift"),decorator:b("$provide","decorator",p),animation:b("$animateProvider","register"),filter:b("$filterProvider","register"),controller:b("$controllerProvider","register"),directive:b("$compileProvider","directive"),component:b("$compileProvider","component"),config:J,run:function(a){r.push(a);return this}};h&&J(h);return v})}})}function pa(a,b){if(H(a)){b=
+b||[];for(var d=0,c=a.length;d<c;d++)b[d]=a[d]}else if(C(a))for(d in b=b||{},a)if("$"!==d.charAt(0)||"$"!==d.charAt(1))b[d]=a[d];return b||a}function Be(a,b){var d=[];Sb(b)&&(a=ra(a,null,b));return JSON.stringify(a,function(a,b){b=Nc(a,b);if(C(b)){if(0<=d.indexOf(b))return"...";d.push(b)}return b})}function Ce(a){S(a,{errorHandlingConfig:me,bootstrap:Sc,copy:ra,extend:S,merge:oe,equals:sa,element:B,forEach:q,injector:eb,noop:z,bind:bb,toJson:cb,fromJson:Oc,identity:Ya,isUndefined:w,isDefined:u,isString:F,
+isFunction:D,isObject:C,isNumber:ba,isElement:Ub,isArray:H,version:De,isDate:ga,lowercase:Q,uppercase:ub,callbacks:{$$counter:0},getTestability:xe,reloadWithDebugInfo:we,$$minErr:L,$$csp:Ga,$$encodeUriSegment:db,$$encodeUriQuery:$,$$stringify:$b});ac=Ae(x);ac("ng",["ngLocale"],["$provide",function(a){a.provider({$$sanitizeUri:Ee});a.provider("$compile",Wc).directive({a:Fe,input:Xc,textarea:Xc,form:Ge,script:He,select:Ie,option:Je,ngBind:Ke,ngBindHtml:Le,ngBindTemplate:Me,ngClass:Ne,ngClassEven:Oe,
+ngClassOdd:Pe,ngCloak:Qe,ngController:Re,ngForm:Se,ngHide:Te,ngIf:Ue,ngInclude:Ve,ngInit:We,ngNonBindable:Xe,ngPluralize:Ye,ngRepeat:Ze,ngShow:$e,ngStyle:af,ngSwitch:bf,ngSwitchWhen:cf,ngSwitchDefault:df,ngOptions:ef,ngTransclude:ff,ngModel:gf,ngList:hf,ngChange:jf,pattern:Yc,ngPattern:Yc,required:Zc,ngRequired:Zc,minlength:$c,ngMinlength:$c,maxlength:ad,ngMaxlength:ad,ngValue:kf,ngModelOptions:lf}).directive({ngInclude:mf}).directive(vb).directive(bd);a.provider({$anchorScroll:nf,$animate:of,$animateCss:pf,
+$$animateJs:qf,$$animateQueue:rf,$$AnimateRunner:sf,$$animateAsyncRun:tf,$browser:uf,$cacheFactory:vf,$controller:wf,$document:xf,$$isDocumentHidden:yf,$exceptionHandler:zf,$filter:cd,$$forceReflow:Af,$interpolate:Bf,$interval:Cf,$http:Df,$httpParamSerializer:Ef,$httpParamSerializerJQLike:Ff,$httpBackend:Gf,$xhrFactory:Hf,$jsonpCallbacks:If,$location:Jf,$log:Kf,$parse:Lf,$rootScope:Mf,$q:Nf,$$q:Of,$sce:Pf,$sceDelegate:Qf,$sniffer:Rf,$templateCache:Sf,$templateRequest:Tf,$$testability:Uf,$timeout:Vf,
+$window:Wf,$$rAF:Xf,$$jqLite:Yf,$$Map:Zf,$$cookieReader:$f})}]).info({angularVersion:"1.6.4"})}function gb(a,b){return b.toUpperCase()}function wb(a){return a.replace(ag,gb)}function bc(a){a=a.nodeType;return 1===a||!a||9===a}function dd(a,b){var d,c,e=b.createDocumentFragment(),f=[];if(cc.test(a)){d=e.appendChild(b.createElement("div"));c=(bg.exec(a)||["",""])[1].toLowerCase();c=ha[c]||ha._default;d.innerHTML=c[1]+a.replace(cg,"<$1></$2>")+c[2];for(c=c[0];c--;)d=d.lastChild;f=ab(f,d.childNodes);
+d=e.firstChild;d.textContent=""}else f.push(b.createTextNode(a));e.textContent="";e.innerHTML="";q(f,function(a){e.appendChild(a)});return e}function W(a){if(a instanceof W)return a;var b;F(a)&&(a=T(a),b=!0);if(!(this instanceof W)){if(b&&"<"!==a.charAt(0))throw dc("nosel");return new W(a)}if(b){b=x.document;var d;a=(d=dg.exec(a))?[b.createElement(d[1])]:(d=dd(a,b))?d.childNodes:[];ec(this,a)}else D(a)?ed(a):ec(this,a)}function fc(a){return a.cloneNode(!0)}function xb(a,b){!b&&bc(a)&&B.cleanData([a]);
+a.querySelectorAll&&B.cleanData(a.querySelectorAll("*"))}function fd(a,b,d,c){if(u(c))throw dc("offargs");var e=(c=yb(a))&&c.events,f=c&&c.handle;if(f)if(b){var g=function(b){var c=e[b];u(d)&&$a(c||[],d);u(d)&&c&&0<c.length||(a.removeEventListener(b,f),delete e[b])};q(b.split(" "),function(a){g(a);zb[a]&&g(zb[a])})}else for(b in e)"$destroy"!==b&&a.removeEventListener(b,f),delete e[b]}function gc(a,b){var d=a.ng339,c=d&&hb[d];c&&(b?delete c.data[b]:(c.handle&&(c.events.$destroy&&c.handle({},"$destroy"),
+fd(a)),delete hb[d],a.ng339=void 0))}function yb(a,b){var d=a.ng339,d=d&&hb[d];b&&!d&&(a.ng339=d=++eg,d=hb[d]={events:{},data:{},handle:void 0});return d}function hc(a,b,d){if(bc(a)){var c,e=u(d),f=!e&&b&&!C(b),g=!b;a=(a=yb(a,!f))&&a.data;if(e)a[wb(b)]=d;else{if(g)return a;if(f)return a&&a[wb(b)];for(c in b)a[wb(c)]=b[c]}}}function Ab(a,b){return a.getAttribute?-1<(" "+(a.getAttribute("class")||"")+" ").replace(/[\n\t]/g," ").indexOf(" "+b+" "):!1}function Bb(a,b){b&&a.setAttribute&&q(b.split(" "),
+function(b){a.setAttribute("class",T((" "+(a.getAttribute("class")||"")+" ").replace(/[\n\t]/g," ").replace(" "+T(b)+" "," ")))})}function Cb(a,b){if(b&&a.setAttribute){var d=(" "+(a.getAttribute("class")||"")+" ").replace(/[\n\t]/g," ");q(b.split(" "),function(a){a=T(a);-1===d.indexOf(" "+a+" ")&&(d+=a+" ")});a.setAttribute("class",T(d))}}function ec(a,b){if(b)if(b.nodeType)a[a.length++]=b;else{var d=b.length;if("number"===typeof d&&b.window!==b){if(d)for(var c=0;c<d;c++)a[a.length++]=b[c]}else a[a.length++]=
+b}}function gd(a,b){return Db(a,"$"+(b||"ngController")+"Controller")}function Db(a,b,d){9===a.nodeType&&(a=a.documentElement);for(b=H(b)?b:[b];a;){for(var c=0,e=b.length;c<e;c++)if(u(d=B.data(a,b[c])))return d;a=a.parentNode||11===a.nodeType&&a.host}}function hd(a){for(xb(a,!0);a.firstChild;)a.removeChild(a.firstChild)}function Eb(a,b){b||xb(a);var d=a.parentNode;d&&d.removeChild(a)}function fg(a,b){b=b||x;if("complete"===b.document.readyState)b.setTimeout(a);else B(b).on("load",a)}function ed(a){function b(){x.document.removeEventListener("DOMContentLoaded",
+b);x.removeEventListener("load",b);a()}"complete"===x.document.readyState?x.setTimeout(a):(x.document.addEventListener("DOMContentLoaded",b),x.addEventListener("load",b))}function id(a,b){var d=Fb[b.toLowerCase()];return d&&jd[wa(a)]&&d}function gg(a,b){var d=function(c,d){c.isDefaultPrevented=function(){return c.defaultPrevented};var f=b[d||c.type],g=f?f.length:0;if(g){if(w(c.immediatePropagationStopped)){var h=c.stopImmediatePropagation;c.stopImmediatePropagation=function(){c.immediatePropagationStopped=
+!0;c.stopPropagation&&c.stopPropagation();h&&h.call(c)}}c.isImmediatePropagationStopped=function(){return!0===c.immediatePropagationStopped};var k=f.specialHandlerWrapper||hg;1<g&&(f=pa(f));for(var l=0;l<g;l++)c.isImmediatePropagationStopped()||k(a,c,f[l])}};d.elem=a;return d}function hg(a,b,d){d.call(a,b)}function ig(a,b,d){var c=b.relatedTarget;c&&(c===a||jg.call(a,c))||d.call(a,b)}function Yf(){this.$get=function(){return S(W,{hasClass:function(a,b){a.attr&&(a=a[0]);return Ab(a,b)},addClass:function(a,
+b){a.attr&&(a=a[0]);return Cb(a,b)},removeClass:function(a,b){a.attr&&(a=a[0]);return Bb(a,b)}})}}function Pa(a,b){var d=a&&a.$$hashKey;if(d)return"function"===typeof d&&(d=a.$$hashKey()),d;d=typeof a;return d="function"===d||"object"===d&&null!==a?a.$$hashKey=d+":"+(b||ne)():d+":"+a}function kd(){this._keys=[];this._values=[];this._lastKey=NaN;this._lastIndex=-1}function ld(a){a=Function.prototype.toString.call(a).replace(kg,"");return a.match(lg)||a.match(mg)}function ng(a){return(a=ld(a))?"function("+
+(a[1]||"").replace(/[\s\r\n]+/," ")+")":"fn"}function eb(a,b){function d(a){return function(b,c){if(C(b))q(b,Lc(a));else return a(b,c)}}function c(a,b){Ka(a,"service");if(D(b)||H(b))b=p.instantiate(b);if(!b.$get)throw ya("pget",a);return n[a+"Provider"]=b}function e(a,b){return function(){var c=v.invoke(b,this);if(w(c))throw ya("undef",a);return c}}function f(a,b,d){return c(a,{$get:!1!==d?e(a,b):b})}function g(a){fb(w(a)||H(a),"modulesToLoad","not an array");var b=[],c;q(a,function(a){function d(a){var b,
+c;b=0;for(c=a.length;b<c;b++){var e=a[b],f=p.get(e[0]);f[e[1]].apply(f,e[2])}}if(!m.get(a)){m.set(a,!0);try{F(a)?(c=ac(a),v.modules[a]=c,b=b.concat(g(c.requires)).concat(c._runBlocks),d(c._invokeQueue),d(c._configBlocks)):D(a)?b.push(p.invoke(a)):H(a)?b.push(p.invoke(a)):sb(a,"module")}catch(e){throw H(a)&&(a=a[a.length-1]),e.message&&e.stack&&-1===e.stack.indexOf(e.message)&&(e=e.message+"\n"+e.stack),ya("modulerr",a,e.stack||e.message||e);}}});return b}function h(a,c){function d(b,e){if(a.hasOwnProperty(b)){if(a[b]===
+k)throw ya("cdep",b+" <- "+l.join(" <- "));return a[b]}try{return l.unshift(b),a[b]=k,a[b]=c(b,e),a[b]}catch(f){throw a[b]===k&&delete a[b],f;}finally{l.shift()}}function e(a,c,f){var g=[];a=eb.$$annotate(a,b,f);for(var k=0,h=a.length;k<h;k++){var l=a[k];if("string"!==typeof l)throw ya("itkn",l);g.push(c&&c.hasOwnProperty(l)?c[l]:d(l,f))}return g}return{invoke:function(a,b,c,d){"string"===typeof c&&(d=c,c=null);c=e(a,c,d);H(a)&&(a=a[a.length-1]);d=a;if(za||"function"!==typeof d)d=!1;else{var f=d.$$ngIsClass;
+Ha(f)||(f=d.$$ngIsClass=/^(?:class\b|constructor\()/.test(Function.prototype.toString.call(d)));d=f}return d?(c.unshift(null),new (Function.prototype.bind.apply(a,c))):a.apply(b,c)},instantiate:function(a,b,c){var d=H(a)?a[a.length-1]:a;a=e(a,b,c);a.unshift(null);return new (Function.prototype.bind.apply(d,a))},get:d,annotate:eb.$$annotate,has:function(b){return n.hasOwnProperty(b+"Provider")||a.hasOwnProperty(b)}}}b=!0===b;var k={},l=[],m=new Gb,n={$provide:{provider:d(c),factory:d(f),service:d(function(a,
+b){return f(a,["$injector",function(a){return a.instantiate(b)}])}),value:d(function(a,b){return f(a,la(b),!1)}),constant:d(function(a,b){Ka(a,"constant");n[a]=b;r[a]=b}),decorator:function(a,b){var c=p.get(a+"Provider"),d=c.$get;c.$get=function(){var a=v.invoke(d,c);return v.invoke(b,null,{$delegate:a})}}}},p=n.$injector=h(n,function(a,b){ea.isString(b)&&l.push(b);throw ya("unpr",l.join(" <- "));}),r={},J=h(r,function(a,b){var c=p.get(a+"Provider",b);return v.invoke(c.$get,c,void 0,a)}),v=J;n.$injectorProvider=
+{$get:la(J)};v.modules=p.modules=V();var t=g(a),v=J.get("$injector");v.strictDi=b;q(t,function(a){a&&v.invoke(a)});return v}function nf(){var a=!0;this.disableAutoScrolling=function(){a=!1};this.$get=["$window","$location","$rootScope",function(b,d,c){function e(a){var b=null;Array.prototype.some.call(a,function(a){if("a"===wa(a))return b=a,!0});return b}function f(a){if(a){a.scrollIntoView();var c;c=g.yOffset;D(c)?c=c():Ub(c)?(c=c[0],c="fixed"!==b.getComputedStyle(c).position?0:c.getBoundingClientRect().bottom):
+ba(c)||(c=0);c&&(a=a.getBoundingClientRect().top,b.scrollBy(0,a-c))}else b.scrollTo(0,0)}function g(a){a=F(a)?a:ba(a)?a.toString():d.hash();var b;a?(b=h.getElementById(a))?f(b):(b=e(h.getElementsByName(a)))?f(b):"top"===a&&f(null):f(null)}var h=b.document;a&&c.$watch(function(){return d.hash()},function(a,b){a===b&&""===a||fg(function(){c.$evalAsync(g)})});return g}]}function ib(a,b){if(!a&&!b)return"";if(!a)return b;if(!b)return a;H(a)&&(a=a.join(" "));H(b)&&(b=b.join(" "));return a+" "+b}function og(a){F(a)&&
+(a=a.split(" "));var b=V();q(a,function(a){a.length&&(b[a]=!0)});return b}function ia(a){return C(a)?a:{}}function pg(a,b,d,c){function e(a){try{a.apply(null,va.call(arguments,1))}finally{if(J--,0===J)for(;v.length;)try{v.pop()()}catch(b){d.error(b)}}}function f(){Oa=null;h()}function g(){t=I();t=w(t)?null:t;sa(t,G)&&(t=G);M=G=t}function h(){var a=M;g();if(N!==k.url()||a!==t)N=k.url(),M=t,q(K,function(a){a(k.url(),t)})}var k=this,l=a.location,m=a.history,n=a.setTimeout,p=a.clearTimeout,r={};k.isMock=
+!1;var J=0,v=[];k.$$completeOutstandingRequest=e;k.$$incOutstandingRequestCount=function(){J++};k.notifyWhenNoOutstandingRequests=function(a){0===J?a():v.push(a)};var t,M,N=l.href,A=b.find("base"),Oa=null,I=c.history?function(){try{return m.state}catch(a){}}:z;g();k.url=function(b,d,e){w(e)&&(e=null);l!==a.location&&(l=a.location);m!==a.history&&(m=a.history);if(b){var f=M===e;if(N===b&&(!c.history||f))return k;var h=N&&Aa(N)===Aa(b);N=b;M=e;!c.history||h&&f?(h||(Oa=b),d?l.replace(b):h?(d=l,e=b.indexOf("#"),
+e=-1===e?"":b.substr(e),d.hash=e):l.href=b,l.href!==b&&(Oa=b)):(m[d?"replaceState":"pushState"](e,"",b),g());Oa&&(Oa=b);return k}return Oa||l.href.replace(/%27/g,"'")};k.state=function(){return t};var K=[],E=!1,G=null;k.onUrlChange=function(b){if(!E){if(c.history)B(a).on("popstate",f);B(a).on("hashchange",f);E=!0}K.push(b);return b};k.$$applicationDestroyed=function(){B(a).off("hashchange popstate",f)};k.$$checkUrlChange=h;k.baseHref=function(){var a=A.attr("href");return a?a.replace(/^(https?:)?\/\/[^/]*/,
+""):""};k.defer=function(a,b){var c;J++;c=n(function(){delete r[c];e(a)},b||0);r[c]=!0;return c};k.defer.cancel=function(a){return r[a]?(delete r[a],p(a),e(z),!0):!1}}function uf(){this.$get=["$window","$log","$sniffer","$document",function(a,b,d,c){return new pg(a,c,b,d)}]}function vf(){this.$get=function(){function a(a,c){function e(a){a!==n&&(p?p===a&&(p=a.n):p=a,f(a.n,a.p),f(a,n),n=a,n.n=null)}function f(a,b){a!==b&&(a&&(a.p=b),b&&(b.n=a))}if(a in b)throw L("$cacheFactory")("iid",a);var g=0,h=
+S({},c,{id:a}),k=V(),l=c&&c.capacity||Number.MAX_VALUE,m=V(),n=null,p=null;return b[a]={put:function(a,b){if(!w(b)){if(l<Number.MAX_VALUE){var c=m[a]||(m[a]={key:a});e(c)}a in k||g++;k[a]=b;g>l&&this.remove(p.key);return b}},get:function(a){if(l<Number.MAX_VALUE){var b=m[a];if(!b)return;e(b)}return k[a]},remove:function(a){if(l<Number.MAX_VALUE){var b=m[a];if(!b)return;b===n&&(n=b.p);b===p&&(p=b.n);f(b.n,b.p);delete m[a]}a in k&&(delete k[a],g--)},removeAll:function(){k=V();g=0;m=V();n=p=null},destroy:function(){m=
+h=k=null;delete b[a]},info:function(){return S({},h,{size:g})}}}var b={};a.info=function(){var a={};q(b,function(b,e){a[e]=b.info()});return a};a.get=function(a){return b[a]};return a}}function Sf(){this.$get=["$cacheFactory",function(a){return a("templates")}]}function Wc(a,b){function d(a,b,c){var d=/^\s*([@&<]|=(\*?))(\??)\s*([\w$]*)\s*$/,e=V();q(a,function(a,f){if(a in n)e[f]=n[a];else{var g=a.match(d);if(!g)throw fa("iscp",b,f,a,c?"controller bindings definition":"isolate scope definition");
+e[f]={mode:g[1][0],collection:"*"===g[2],optional:"?"===g[3],attrName:g[4]||f};g[4]&&(n[a]=e[f])}});return e}function c(a){var b=a.charAt(0);if(!b||b!==Q(b))throw fa("baddir",a);if(a!==a.trim())throw fa("baddir",a);}function e(a){var b=a.require||a.controller&&a.name;!H(b)&&C(b)&&q(b,function(a,c){var d=a.match(l);a.substring(d[0].length)||(b[c]=d[0]+c)});return b}var f={},g=/^\s*directive:\s*([\w-]+)\s+(.*)$/,h=/(([\w-]+)(?::([^;]+))?;?)/,k=re("ngSrc,ngSrcset,src,srcset"),l=/^(?:(\^\^?)?(\?)?(\^\^?)?)?/,
+m=/^(on[a-z]+|formaction)$/,n=V();this.directive=function N(b,d){fb(b,"name");Ka(b,"directive");F(b)?(c(b),fb(d,"directiveFactory"),f.hasOwnProperty(b)||(f[b]=[],a.factory(b+"Directive",["$injector","$exceptionHandler",function(a,c){var d=[];q(f[b],function(f,g){try{var h=a.invoke(f);D(h)?h={compile:la(h)}:!h.compile&&h.link&&(h.compile=la(h.link));h.priority=h.priority||0;h.index=g;h.name=h.name||b;h.require=e(h);var k=h,l=h.restrict;if(l&&(!F(l)||!/[EACM]/.test(l)))throw fa("badrestrict",l,b);k.restrict=
+l||"EA";h.$$moduleName=f.$$moduleName;d.push(h)}catch(m){c(m)}});return d}])),f[b].push(d)):q(b,Lc(N));return this};this.component=function(a,b){function c(a){function e(b){return D(b)||H(b)?function(c,d){return a.invoke(b,this,{$element:c,$attrs:d})}:b}var f=b.template||b.templateUrl?b.template:"",g={controller:d,controllerAs:qg(b.controller)||b.controllerAs||"$ctrl",template:e(f),templateUrl:e(b.templateUrl),transclude:b.transclude,scope:{},bindToController:b.bindings||{},restrict:"E",require:b.require};
+q(b,function(a,b){"$"===b.charAt(0)&&(g[b]=a)});return g}var d=b.controller||function(){};q(b,function(a,b){"$"===b.charAt(0)&&(c[b]=a,D(d)&&(d[b]=a))});c.$inject=["$injector"];return this.directive(a,c)};this.aHrefSanitizationWhitelist=function(a){return u(a)?(b.aHrefSanitizationWhitelist(a),this):b.aHrefSanitizationWhitelist()};this.imgSrcSanitizationWhitelist=function(a){return u(a)?(b.imgSrcSanitizationWhitelist(a),this):b.imgSrcSanitizationWhitelist()};var p=!0;this.debugInfoEnabled=function(a){return u(a)?
+(p=a,this):p};var r=!1;this.preAssignBindingsEnabled=function(a){return u(a)?(r=a,this):r};var J=10;this.onChangesTtl=function(a){return arguments.length?(J=a,this):J};var v=!0;this.commentDirectivesEnabled=function(a){return arguments.length?(v=a,this):v};var t=!0;this.cssClassDirectivesEnabled=function(a){return arguments.length?(t=a,this):t};this.$get=["$injector","$interpolate","$exceptionHandler","$templateRequest","$parse","$controller","$rootScope","$sce","$animate","$$sanitizeUri",function(a,
+b,c,e,n,E,G,y,O,X){function P(){try{if(!--ya)throw ia=void 0,fa("infchng",J);G.$apply(function(){for(var a=[],b=0,c=ia.length;b<c;++b)try{ia[b]()}catch(d){a.push(d)}ia=void 0;if(a.length)throw a;})}finally{ya++}}function s(a,b){if(b){var c=Object.keys(b),d,e,f;d=0;for(e=c.length;d<e;d++)f=c[d],this[f]=b[f]}else this.$attr={};this.$$element=a}function R(a,b,c){ta.innerHTML="<span "+b+">";b=ta.firstChild.attributes;var d=b[0];b.removeNamedItem(d.name);d.value=c;a.attributes.setNamedItem(d)}function La(a,
+b){try{a.addClass(b)}catch(c){}}function ca(a,b,c,d,e){a instanceof B||(a=B(a));var f=Ma(a,b,a,c,d,e);ca.$$addScopeClass(a);var g=null;return function(b,c,d){if(!a)throw fa("multilink");fb(b,"scope");e&&e.needsNewScope&&(b=b.$parent.$new());d=d||{};var h=d.parentBoundTranscludeFn,k=d.transcludeControllers;d=d.futureParentElement;h&&h.$$boundTransclude&&(h=h.$$boundTransclude);g||(g=(d=d&&d[0])?"foreignobject"!==wa(d)&&ma.call(d).match(/SVG/)?"svg":"html":"html");d="html"!==g?B(ha(g,B("<div>").append(a).html())):
+c?Na.clone.call(a):a;if(k)for(var l in k)d.data("$"+l+"Controller",k[l].instance);ca.$$addScopeInfo(d,b);c&&c(d,b);f&&f(b,d,d,h);c||(a=f=null);return d}}function Ma(a,b,c,d,e,f){function g(a,c,d,e){var f,k,l,m,n,p,r;if(K)for(r=Array(c.length),m=0;m<h.length;m+=3)f=h[m],r[f]=c[f];else r=c;m=0;for(n=h.length;m<n;)k=r[h[m++]],c=h[m++],f=h[m++],c?(c.scope?(l=a.$new(),ca.$$addScopeInfo(B(k),l)):l=a,p=c.transcludeOnThisElement?ja(a,c.transclude,e):!c.templateOnThisElement&&e?e:!e&&b?ja(a,b):null,c(f,l,
+k,d,p)):f&&f(a,k.childNodes,void 0,e)}for(var h=[],k=H(a)||a instanceof B,l,m,n,p,K,r=0;r<a.length;r++){l=new s;11===za&&L(a,r,k);m=jc(a[r],[],l,0===r?d:void 0,e);(f=m.length?W(m,a[r],l,b,c,null,[],[],f):null)&&f.scope&&ca.$$addScopeClass(l.$$element);l=f&&f.terminal||!(n=a[r].childNodes)||!n.length?null:Ma(n,f?(f.transcludeOnThisElement||!f.templateOnThisElement)&&f.transclude:b);if(f||l)h.push(r,f,l),p=!0,K=K||f;f=null}return p?g:null}function L(a,b,c){var d=a[b],e=d.parentNode,f;if(d.nodeType===
+Ia)for(;;){f=e?d.nextSibling:a[b+1];if(!f||f.nodeType!==Ia)break;d.nodeValue+=f.nodeValue;f.parentNode&&f.parentNode.removeChild(f);c&&f===a[b+1]&&a.splice(b+1,1)}}function ja(a,b,c){function d(e,f,g,h,k){e||(e=a.$new(!1,k),e.$$transcluded=!0);return b(e,f,{parentBoundTranscludeFn:c,transcludeControllers:g,futureParentElement:h})}var e=d.$$slots=V(),f;for(f in b.$$slots)e[f]=b.$$slots[f]?ja(a,b.$$slots[f],c):null;return d}function jc(a,b,c,d,e){var f=c.$attr,g;switch(a.nodeType){case 1:g=wa(a);Y(b,
+Ba(g),"E",d,e);for(var k,l,m,n,p=a.attributes,K=0,r=p&&p.length;K<r;K++){var G=!1,E=!1;k=p[K];l=k.name;m=k.value;k=Ba(l);(n=Ja.test(k))&&(l=l.replace(md,"").substr(8).replace(/_(.)/g,function(a,b){return b.toUpperCase()}));(k=k.match(Ka))&&Z(k[1])&&(G=l,E=l.substr(0,l.length-5)+"end",l=l.substr(0,l.length-6));k=Ba(l.toLowerCase());f[k]=l;if(n||!c.hasOwnProperty(k))c[k]=m,id(a,k)&&(c[k]=!0);pa(a,b,m,k,n);Y(b,k,"A",d,e,G,E)}"input"===g&&"hidden"===a.getAttribute("type")&&a.setAttribute("autocomplete",
+"off");if(!Ga)break;f=a.className;C(f)&&(f=f.animVal);if(F(f)&&""!==f)for(;a=h.exec(f);)k=Ba(a[2]),Y(b,k,"C",d,e)&&(c[k]=T(a[3])),f=f.substr(a.index+a[0].length);break;case Ia:la(b,a.nodeValue);break;case 8:if(!Fa)break;jb(a,b,c,d,e)}b.sort(ea);return b}function jb(a,b,c,d,e){try{var f=g.exec(a.nodeValue);if(f){var h=Ba(f[1]);Y(b,h,"M",d,e)&&(c[h]=T(f[2]))}}catch(k){}}function nd(a,b,c){var d=[],e=0;if(b&&a.hasAttribute&&a.hasAttribute(b)){do{if(!a)throw fa("uterdir",b,c);1===a.nodeType&&(a.hasAttribute(b)&&
+e++,a.hasAttribute(c)&&e--);d.push(a);a=a.nextSibling}while(0<e)}else d.push(a);return B(d)}function od(a,b,c){return function(d,e,f,g,h){e=nd(e[0],b,c);return a(d,e,f,g,h)}}function kc(a,b,c,d,e,f){var g;return a?ca(b,c,d,e,f):function(){g||(g=ca(b,c,d,e,f),b=c=f=null);return g.apply(this,arguments)}}function W(a,b,d,e,f,g,h,k,l){function m(a,b,c,d){if(a){c&&(a=od(a,c,d));a.require=y.require;a.directiveName=P;if(E===y||y.$$isolateScope)a=qa(a,{isolateScope:!0});h.push(a)}if(b){c&&(b=od(b,c,d));b.require=
+y.require;b.directiveName=P;if(E===y||y.$$isolateScope)b=qa(b,{isolateScope:!0});k.push(b)}}function n(a,e,f,g,l){function m(a,b,c,d){var e;Za(a)||(d=c,c=b,b=a,a=void 0);X&&(e=O);c||(c=X?P.parent():P);if(d){var f=l.$$slots[d];if(f)return f(a,b,e,c,R);if(w(f))throw fa("noslot",d,xa(P));}else return l(a,b,e,c,R)}var p,y,t,v,J,O,N,P;b===f?(g=d,P=d.$$element):(P=B(f),g=new s(P,d));J=e;E?v=e.$new(!0):K&&(J=e.$parent);l&&(N=m,N.$$boundTransclude=l,N.isSlotFilled=function(a){return!!l.$$slots[a]});G&&(O=
+ba(P,g,N,G,v,e,E));E&&(ca.$$addScopeInfo(P,v,!0,!(I&&(I===E||I===E.$$originalDirective))),ca.$$addScopeClass(P,!0),v.$$isolateBindings=E.$$isolateBindings,y=na(e,g,v,v.$$isolateBindings,E),y.removeWatches&&v.$on("$destroy",y.removeWatches));for(p in O){y=G[p];t=O[p];var Hb=y.$$bindings.bindToController;if(r){t.bindingInfo=Hb?na(J,g,t.instance,Hb,y):{};var A=t();A!==t.instance&&(t.instance=A,P.data("$"+y.name+"Controller",A),t.bindingInfo.removeWatches&&t.bindingInfo.removeWatches(),t.bindingInfo=
+na(J,g,t.instance,Hb,y))}else t.instance=t(),P.data("$"+y.name+"Controller",t.instance),t.bindingInfo=na(J,g,t.instance,Hb,y)}q(G,function(a,b){var c=a.require;a.bindToController&&!H(c)&&C(c)&&S(O[b].instance,U(b,c,P,O))});q(O,function(a){var b=a.instance;if(D(b.$onChanges))try{b.$onChanges(a.bindingInfo.initialChanges)}catch(d){c(d)}if(D(b.$onInit))try{b.$onInit()}catch(e){c(e)}D(b.$doCheck)&&(J.$watch(function(){b.$doCheck()}),b.$doCheck());D(b.$onDestroy)&&J.$on("$destroy",function(){b.$onDestroy()})});
+p=0;for(y=h.length;p<y;p++)t=h[p],ra(t,t.isolateScope?v:e,P,g,t.require&&U(t.directiveName,t.require,P,O),N);var R=e;E&&(E.template||null===E.templateUrl)&&(R=v);a&&a(R,f.childNodes,void 0,l);for(p=k.length-1;0<=p;p--)t=k[p],ra(t,t.isolateScope?v:e,P,g,t.require&&U(t.directiveName,t.require,P,O),N);q(O,function(a){a=a.instance;D(a.$postLink)&&a.$postLink()})}l=l||{};for(var p=-Number.MAX_VALUE,K=l.newScopeDirective,G=l.controllerDirectives,E=l.newIsolateScopeDirective,I=l.templateDirective,t=l.nonTlbTranscludeDirective,
+J=!1,O=!1,X=l.hasElementTranscludeDirective,v=d.$$element=B(b),y,P,N,A=e,R,u=!1,La=!1,x,z=0,F=a.length;z<F;z++){y=a[z];var Ma=y.$$start,L=y.$$end;Ma&&(v=nd(b,Ma,L));N=void 0;if(p>y.priority)break;if(x=y.scope)y.templateUrl||(C(x)?($("new/isolated scope",E||K,y,v),E=y):$("new/isolated scope",E,y,v)),K=K||y;P=y.name;if(!u&&(y.replace&&(y.templateUrl||y.template)||y.transclude&&!y.$$tlb)){for(x=z+1;u=a[x++];)if(u.transclude&&!u.$$tlb||u.replace&&(u.templateUrl||u.template)){La=!0;break}u=!0}!y.templateUrl&&
+y.controller&&(G=G||V(),$("'"+P+"' controller",G[P],y,v),G[P]=y);if(x=y.transclude)if(J=!0,y.$$tlb||($("transclusion",t,y,v),t=y),"element"===x)X=!0,p=y.priority,N=v,v=d.$$element=B(ca.$$createComment(P,d[P])),b=v[0],ka(f,va.call(N,0),b),N[0].$$parentNode=N[0].parentNode,A=kc(La,N,e,p,g&&g.name,{nonTlbTranscludeDirective:t});else{var ja=V();if(C(x)){N=[];var Q=V(),jb=V();q(x,function(a,b){var c="?"===a.charAt(0);a=c?a.substring(1):a;Q[a]=b;ja[b]=null;jb[b]=c});q(v.contents(),function(a){var b=Q[Ba(wa(a))];
+b?(jb[b]=!0,ja[b]=ja[b]||[],ja[b].push(a)):N.push(a)});q(jb,function(a,b){if(!a)throw fa("reqslot",b);});for(var ic in ja)ja[ic]&&(ja[ic]=kc(La,ja[ic],e))}else N=B(fc(b)).contents();v.empty();A=kc(La,N,e,void 0,void 0,{needsNewScope:y.$$isolateScope||y.$$newScope});A.$$slots=ja}if(y.template)if(O=!0,$("template",I,y,v),I=y,x=D(y.template)?y.template(v,d):y.template,x=Ea(x),y.replace){g=y;N=cc.test(x)?pd(ha(y.templateNamespace,T(x))):[];b=N[0];if(1!==N.length||1!==b.nodeType)throw fa("tplrt",P,"");
+ka(f,v,b);F={$attr:{}};x=jc(b,[],F);var Y=a.splice(z+1,a.length-(z+1));(E||K)&&aa(x,E,K);a=a.concat(x).concat(Y);da(d,F);F=a.length}else v.html(x);if(y.templateUrl)O=!0,$("template",I,y,v),I=y,y.replace&&(g=y),n=ga(a.splice(z,a.length-z),v,d,f,J&&A,h,k,{controllerDirectives:G,newScopeDirective:K!==y&&K,newIsolateScopeDirective:E,templateDirective:I,nonTlbTranscludeDirective:t}),F=a.length;else if(y.compile)try{R=y.compile(v,d,A);var Z=y.$$originalDirective||y;D(R)?m(null,bb(Z,R),Ma,L):R&&m(bb(Z,R.pre),
+bb(Z,R.post),Ma,L)}catch(ea){c(ea,xa(v))}y.terminal&&(n.terminal=!0,p=Math.max(p,y.priority))}n.scope=K&&!0===K.scope;n.transcludeOnThisElement=J;n.templateOnThisElement=O;n.transclude=A;l.hasElementTranscludeDirective=X;return n}function U(a,b,c,d){var e;if(F(b)){var f=b.match(l);b=b.substring(f[0].length);var g=f[1]||f[3],f="?"===f[2];"^^"===g?c=c.parent():e=(e=d&&d[b])&&e.instance;if(!e){var h="$"+b+"Controller";e=g?c.inheritedData(h):c.data(h)}if(!e&&!f)throw fa("ctreq",b,a);}else if(H(b))for(e=
+[],g=0,f=b.length;g<f;g++)e[g]=U(a,b[g],c,d);else C(b)&&(e={},q(b,function(b,f){e[f]=U(a,b,c,d)}));return e||null}function ba(a,b,c,d,e,f,g){var h=V(),k;for(k in d){var l=d[k],m={$scope:l===g||l.$$isolateScope?e:f,$element:a,$attrs:b,$transclude:c},n=l.controller;"@"===n&&(n=b[l.name]);m=E(n,m,!0,l.controllerAs);h[l.name]=m;a.data("$"+l.name+"Controller",m.instance)}return h}function aa(a,b,c){for(var d=0,e=a.length;d<e;d++)a[d]=Vb(a[d],{$$isolateScope:b,$$newScope:c})}function Y(b,c,e,g,h,k,l){if(c===
+h)return null;var m=null;if(f.hasOwnProperty(c)){h=a.get(c+"Directive");for(var n=0,p=h.length;n<p;n++)if(c=h[n],(w(g)||g>c.priority)&&-1!==c.restrict.indexOf(e)){k&&(c=Vb(c,{$$start:k,$$end:l}));if(!c.$$bindings){var K=m=c,r=c.name,t={isolateScope:null,bindToController:null};C(K.scope)&&(!0===K.bindToController?(t.bindToController=d(K.scope,r,!0),t.isolateScope={}):t.isolateScope=d(K.scope,r,!1));C(K.bindToController)&&(t.bindToController=d(K.bindToController,r,!0));if(t.bindToController&&!K.controller)throw fa("noctrl",
+r);m=m.$$bindings=t;C(m.isolateScope)&&(c.$$isolateBindings=m.isolateScope)}b.push(c);m=c}}return m}function Z(b){if(f.hasOwnProperty(b))for(var c=a.get(b+"Directive"),d=0,e=c.length;d<e;d++)if(b=c[d],b.multiElement)return!0;return!1}function da(a,b){var c=b.$attr,d=a.$attr;q(a,function(d,e){"$"!==e.charAt(0)&&(b[e]&&b[e]!==d&&(d=d.length?d+(("style"===e?";":" ")+b[e]):b[e]),a.$set(e,d,!0,c[e]))});q(b,function(b,e){a.hasOwnProperty(e)||"$"===e.charAt(0)||(a[e]=b,"class"!==e&&"style"!==e&&(d[e]=c[e]))})}
+function ga(a,b,d,f,g,h,k,l){var m=[],n,p,K=b[0],r=a.shift(),t=Vb(r,{templateUrl:null,transclude:null,replace:null,$$originalDirective:r}),y=D(r.templateUrl)?r.templateUrl(b,d):r.templateUrl,E=r.templateNamespace;b.empty();e(y).then(function(c){var e,G;c=Ea(c);if(r.replace){c=cc.test(c)?pd(ha(E,T(c))):[];e=c[0];if(1!==c.length||1!==e.nodeType)throw fa("tplrt",r.name,y);c={$attr:{}};ka(f,b,e);var I=jc(e,[],c);C(r.scope)&&aa(I,!0);a=I.concat(a);da(d,c)}else e=K,b.html(c);a.unshift(t);n=W(a,e,d,g,b,
+r,h,k,l);q(f,function(a,c){a===e&&(f[c]=b[0])});for(p=Ma(b[0].childNodes,g);m.length;){c=m.shift();G=m.shift();var v=m.shift(),J=m.shift(),I=b[0];if(!c.$$destroyed){if(G!==K){var O=G.className;l.hasElementTranscludeDirective&&r.replace||(I=fc(e));ka(v,B(G),I);La(B(I),O)}G=n.transcludeOnThisElement?ja(c,n.transclude,J):J;n(p,c,I,f,G)}}m=null}).catch(function(a){a instanceof Error&&c(a)});return function(a,b,c,d,e){a=e;b.$$destroyed||(m?m.push(b,c,d,a):(n.transcludeOnThisElement&&(a=ja(b,n.transclude,
+e)),n(p,b,c,d,a)))}}function ea(a,b){var c=b.priority-a.priority;return 0!==c?c:a.name!==b.name?a.name<b.name?-1:1:a.index-b.index}function $(a,b,c,d){function e(a){return a?" (module: "+a+")":""}if(b)throw fa("multidir",b.name,e(b.$$moduleName),c.name,e(c.$$moduleName),a,xa(d));}function la(a,c){var d=b(c,!0);d&&a.push({priority:0,compile:function(a){a=a.parent();var b=!!a.length;b&&ca.$$addBindingClass(a);return function(a,c){var e=c.parent();b||ca.$$addBindingClass(e);ca.$$addBindingInfo(e,d.expressions);
+a.$watch(d,function(a){c[0].nodeValue=a})}}})}function ha(a,b){a=Q(a||"html");switch(a){case "svg":case "math":var c=x.document.createElement("div");c.innerHTML="<"+a+">"+b+"</"+a+">";return c.childNodes[0].childNodes;default:return b}}function oa(a,b){if("srcdoc"===b)return y.HTML;var c=wa(a);if("src"===b||"ngSrc"===b){if(-1===["img","video","audio","source","track"].indexOf(c))return y.RESOURCE_URL}else if("xlinkHref"===b||"form"===c&&"action"===b||"link"===c&&"href"===b)return y.RESOURCE_URL}function pa(a,
+c,d,e,f){var g=oa(a,e),h=k[e]||f,l=b(d,!f,g,h);if(l){if("multiple"===e&&"select"===wa(a))throw fa("selmulti",xa(a));if(m.test(e))throw fa("nodomevents");c.push({priority:100,compile:function(){return{pre:function(a,c,f){c=f.$$observers||(f.$$observers=V());var k=f[e];k!==d&&(l=k&&b(k,!0,g,h),d=k);l&&(f[e]=l(a),(c[e]||(c[e]=[])).$$inter=!0,(f.$$observers&&f.$$observers[e].$$scope||a).$watch(l,function(a,b){"class"===e&&a!==b?f.$updateClass(a,b):f.$set(e,a)}))}}}})}}function ka(a,b,c){var d=b[0],e=
+b.length,f=d.parentNode,g,h;if(a)for(g=0,h=a.length;g<h;g++)if(a[g]===d){a[g++]=c;h=g+e-1;for(var k=a.length;g<k;g++,h++)h<k?a[g]=a[h]:delete a[g];a.length-=e-1;a.context===d&&(a.context=c);break}f&&f.replaceChild(c,d);a=x.document.createDocumentFragment();for(g=0;g<e;g++)a.appendChild(b[g]);B.hasData(d)&&(B.data(c,B.data(d)),B(d).off("$destroy"));B.cleanData(a.querySelectorAll("*"));for(g=1;g<e;g++)delete b[g];b[0]=c;b.length=1}function qa(a,b){return S(function(){return a.apply(null,arguments)},
+a,b)}function ra(a,b,d,e,f,g){try{a(b,d,e,f,g)}catch(h){c(h,xa(d))}}function na(a,c,d,e,f){function g(b,c,e){D(d.$onChanges)&&!Xb(c,e)&&(ia||(a.$$postDigest(P),ia=[]),m||(m={},ia.push(h)),m[b]&&(e=m[b].previousValue),m[b]=new Ib(e,c))}function h(){d.$onChanges(m);m=void 0}var k=[],l={},m;q(e,function(e,h){var m=e.attrName,p=e.optional,r,t,y,G;switch(e.mode){case "@":p||ua.call(c,m)||(d[h]=c[m]=void 0);p=c.$observe(m,function(a){if(F(a)||Ha(a))g(h,a,d[h]),d[h]=a});c.$$observers[m].$$scope=a;r=c[m];
+F(r)?d[h]=b(r)(a):Ha(r)&&(d[h]=r);l[h]=new Ib(lc,d[h]);k.push(p);break;case "=":if(!ua.call(c,m)){if(p)break;c[m]=void 0}if(p&&!c[m])break;t=n(c[m]);G=t.literal?sa:Xb;y=t.assign||function(){r=d[h]=t(a);throw fa("nonassign",c[m],m,f.name);};r=d[h]=t(a);p=function(b){G(b,d[h])||(G(b,r)?y(a,b=d[h]):d[h]=b);return r=b};p.$stateful=!0;p=e.collection?a.$watchCollection(c[m],p):a.$watch(n(c[m],p),null,t.literal);k.push(p);break;case "<":if(!ua.call(c,m)){if(p)break;c[m]=void 0}if(p&&!c[m])break;t=n(c[m]);
+var E=t.literal,I=d[h]=t(a);l[h]=new Ib(lc,d[h]);p=a.$watch(t,function(a,b){if(b===a){if(b===I||E&&sa(b,I))return;b=I}g(h,a,b);d[h]=a},E);k.push(p);break;case "&":t=c.hasOwnProperty(m)?n(c[m]):z;if(t===z&&p)break;d[h]=function(b){return t(a,b)}}});return{initialChanges:l,removeWatches:k.length&&function(){for(var a=0,b=k.length;a<b;++a)k[a]()}}}var Ca=/^\w/,ta=x.document.createElement("div"),Fa=v,Ga=t,ya=J,ia;s.prototype={$normalize:Ba,$addClass:function(a){a&&0<a.length&&O.addClass(this.$$element,
+a)},$removeClass:function(a){a&&0<a.length&&O.removeClass(this.$$element,a)},$updateClass:function(a,b){var c=qd(a,b);c&&c.length&&O.addClass(this.$$element,c);(c=qd(b,a))&&c.length&&O.removeClass(this.$$element,c)},$set:function(a,b,d,e){var f=id(this.$$element[0],a),g=rd[a],h=a;f?(this.$$element.prop(a,b),e=f):g&&(this[g]=b,h=g);this[a]=b;e?this.$attr[a]=e:(e=this.$attr[a])||(this.$attr[a]=e=Tc(a,"-"));f=wa(this.$$element);if("a"===f&&("href"===a||"xlinkHref"===a)||"img"===f&&"src"===a)this[a]=
+b=X(b,"src"===a);else if("img"===f&&"srcset"===a&&u(b)){for(var f="",g=T(b),k=/(\s+\d+x\s*,|\s+\d+w\s*,|\s+,|,\s+)/,k=/\s/.test(g)?k:/(,)/,g=g.split(k),k=Math.floor(g.length/2),l=0;l<k;l++)var m=2*l,f=f+X(T(g[m]),!0),f=f+(" "+T(g[m+1]));g=T(g[2*l]).split(/\s/);f+=X(T(g[0]),!0);2===g.length&&(f+=" "+T(g[1]));this[a]=b=f}!1!==d&&(null===b||w(b)?this.$$element.removeAttr(e):Ca.test(e)?this.$$element.attr(e,b):R(this.$$element[0],e,b));(a=this.$$observers)&&q(a[h],function(a){try{a(b)}catch(d){c(d)}})},
+$observe:function(a,b){var c=this,d=c.$$observers||(c.$$observers=V()),e=d[a]||(d[a]=[]);e.push(b);G.$evalAsync(function(){e.$$inter||!c.hasOwnProperty(a)||w(c[a])||b(c[a])});return function(){$a(e,b)}}};var Aa=b.startSymbol(),Da=b.endSymbol(),Ea="{{"===Aa&&"}}"===Da?Ya:function(a){return a.replace(/\{\{/g,Aa).replace(/}}/g,Da)},Ja=/^ngAttr[A-Z]/,Ka=/^(.+)Start$/;ca.$$addBindingInfo=p?function(a,b){var c=a.data("$binding")||[];H(b)?c=c.concat(b):c.push(b);a.data("$binding",c)}:z;ca.$$addBindingClass=
+p?function(a){La(a,"ng-binding")}:z;ca.$$addScopeInfo=p?function(a,b,c,d){a.data(c?d?"$isolateScopeNoTemplate":"$isolateScope":"$scope",b)}:z;ca.$$addScopeClass=p?function(a,b){La(a,b?"ng-isolate-scope":"ng-scope")}:z;ca.$$createComment=function(a,b){var c="";p&&(c=" "+(a||"")+": ",b&&(c+=b+" "));return x.document.createComment(c)};return ca}]}function Ib(a,b){this.previousValue=a;this.currentValue=b}function Ba(a){return a.replace(md,"").replace(rg,gb)}function qd(a,b){var d="",c=a.split(/\s+/),
+e=b.split(/\s+/),f=0;a:for(;f<c.length;f++){for(var g=c[f],h=0;h<e.length;h++)if(g===e[h])continue a;d+=(0<d.length?" ":"")+g}return d}function pd(a){a=B(a);var b=a.length;if(1>=b)return a;for(;b--;){var d=a[b];(8===d.nodeType||d.nodeType===Ia&&""===d.nodeValue.trim())&&sg.call(a,b,1)}return a}function qg(a,b){if(b&&F(b))return b;if(F(a)){var d=sd.exec(a);if(d)return d[3]}}function wf(){var a={},b=!1;this.has=function(b){return a.hasOwnProperty(b)};this.register=function(b,c){Ka(b,"controller");C(b)?
+S(a,b):a[b]=c};this.allowGlobals=function(){b=!0};this.$get=["$injector","$window",function(d,c){function e(a,b,c,d){if(!a||!C(a.$scope))throw L("$controller")("noscp",d,b);a.$scope[b]=c}return function(f,g,h,k){var l,m,n;h=!0===h;k&&F(k)&&(n=k);if(F(f)){k=f.match(sd);if(!k)throw td("ctrlfmt",f);m=k[1];n=n||k[3];f=a.hasOwnProperty(m)?a[m]:Vc(g.$scope,m,!0)||(b?Vc(c,m,!0):void 0);if(!f)throw td("ctrlreg",m);sb(f,m,!0)}if(h)return h=(H(f)?f[f.length-1]:f).prototype,l=Object.create(h||null),n&&e(g,n,
+l,m||f.name),S(function(){var a=d.invoke(f,l,g,m);a!==l&&(C(a)||D(a))&&(l=a,n&&e(g,n,l,m||f.name));return l},{instance:l,identifier:n});l=d.instantiate(f,g,m);n&&e(g,n,l,m||f.name);return l}}]}function xf(){this.$get=["$window",function(a){return B(a.document)}]}function yf(){this.$get=["$document","$rootScope",function(a,b){function d(){e=c.hidden}var c=a[0],e=c&&c.hidden;a.on("visibilitychange",d);b.$on("$destroy",function(){a.off("visibilitychange",d)});return function(){return e}}]}function zf(){this.$get=
+["$log",function(a){return function(b,d){a.error.apply(a,arguments)}}]}function mc(a){return C(a)?ga(a)?a.toISOString():cb(a):a}function Ef(){this.$get=function(){return function(a){if(!a)return"";var b=[];Kc(a,function(a,c){null===a||w(a)||(H(a)?q(a,function(a){b.push($(c)+"="+$(mc(a)))}):b.push($(c)+"="+$(mc(a))))});return b.join("&")}}}function Ff(){this.$get=function(){return function(a){function b(a,e,f){null===a||w(a)||(H(a)?q(a,function(a,c){b(a,e+"["+(C(a)?c:"")+"]")}):C(a)&&!ga(a)?Kc(a,function(a,
+c){b(a,e+(f?"":"[")+c+(f?"":"]"))}):d.push($(e)+"="+$(mc(a))))}if(!a)return"";var d=[];b(a,"",!0);return d.join("&")}}}function nc(a,b){if(F(a)){var d=a.replace(tg,"").trim();if(d){var c=b("Content-Type");(c=c&&0===c.indexOf(ud))||(c=(c=d.match(ug))&&vg[c[0]].test(d));if(c)try{a=Oc(d)}catch(e){throw oc("baddata",a,e);}}}return a}function vd(a){var b=V(),d;F(a)?q(a.split("\n"),function(a){d=a.indexOf(":");var e=Q(T(a.substr(0,d)));a=T(a.substr(d+1));e&&(b[e]=b[e]?b[e]+", "+a:a)}):C(a)&&q(a,function(a,
+d){var f=Q(d),g=T(a);f&&(b[f]=b[f]?b[f]+", "+g:g)});return b}function wd(a){var b;return function(d){b||(b=vd(a));return d?(d=b[Q(d)],void 0===d&&(d=null),d):b}}function xd(a,b,d,c){if(D(c))return c(a,b,d);q(c,function(c){a=c(a,b,d)});return a}function Df(){var a=this.defaults={transformResponse:[nc],transformRequest:[function(a){return C(a)&&"[object File]"!==ma.call(a)&&"[object Blob]"!==ma.call(a)&&"[object FormData]"!==ma.call(a)?cb(a):a}],headers:{common:{Accept:"application/json, text/plain, */*"},
+post:pa(pc),put:pa(pc),patch:pa(pc)},xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN",paramSerializer:"$httpParamSerializer",jsonpCallbackParam:"callback"},b=!1;this.useApplyAsync=function(a){return u(a)?(b=!!a,this):b};var d=this.interceptors=[];this.$get=["$browser","$httpBackend","$$cookieReader","$cacheFactory","$rootScope","$q","$injector","$sce",function(c,e,f,g,h,k,l,m){function n(b){function d(a,b){for(var c=0,e=b.length;c<e;){var f=b[c++],g=b[c++];a=a.then(f,g)}b.length=0;return a}
+function e(a,b){var c,d={};q(a,function(a,e){D(a)?(c=a(b),null!=c&&(d[e]=c)):d[e]=a});return d}function f(a){var b=S({},a);b.data=xd(a.data,a.headers,a.status,g.transformResponse);a=a.status;return 200<=a&&300>a?b:k.reject(b)}if(!C(b))throw L("$http")("badreq",b);if(!F(m.valueOf(b.url)))throw L("$http")("badreq",b.url);var g=S({method:"get",transformRequest:a.transformRequest,transformResponse:a.transformResponse,paramSerializer:a.paramSerializer,jsonpCallbackParam:a.jsonpCallbackParam},b);g.headers=
+function(b){var c=a.headers,d=S({},b.headers),f,g,h,c=S({},c.common,c[Q(b.method)]);a:for(f in c){g=Q(f);for(h in d)if(Q(h)===g)continue a;d[f]=c[f]}return e(d,pa(b))}(b);g.method=ub(g.method);g.paramSerializer=F(g.paramSerializer)?l.get(g.paramSerializer):g.paramSerializer;c.$$incOutstandingRequestCount();var h=[],n=[];b=k.resolve(g);q(t,function(a){(a.request||a.requestError)&&h.unshift(a.request,a.requestError);(a.response||a.responseError)&&n.push(a.response,a.responseError)});b=d(b,h);b=b.then(function(b){var c=
+b.headers,d=xd(b.data,wd(c),void 0,b.transformRequest);w(d)&&q(c,function(a,b){"content-type"===Q(b)&&delete c[b]});w(b.withCredentials)&&!w(a.withCredentials)&&(b.withCredentials=a.withCredentials);return p(b,d).then(f,f)});b=d(b,n);return b=b.finally(function(){c.$$completeOutstandingRequest(z)})}function p(c,d){function g(a){if(a){var c={};q(a,function(a,d){c[d]=function(c){function d(){a(c)}b?h.$applyAsync(d):h.$$phase?d():h.$apply(d)}});return c}}function l(a,c,d,e){function f(){p(c,a,d,e)}O&&
+(200<=a&&300>a?O.put(R,[a,c,vd(d),e]):O.remove(R));b?h.$applyAsync(f):(f(),h.$$phase||h.$apply())}function p(a,b,d,e){b=-1<=b?b:0;(200<=b&&300>b?G.resolve:G.reject)({data:a,status:b,headers:wd(d),config:c,statusText:e})}function K(a){p(a.data,a.status,pa(a.headers()),a.statusText)}function t(){var a=n.pendingRequests.indexOf(c);-1!==a&&n.pendingRequests.splice(a,1)}var G=k.defer(),y=G.promise,O,X,P=c.headers,s="jsonp"===Q(c.method),R=c.url;s?R=m.getTrustedResourceUrl(R):F(R)||(R=m.valueOf(R));R=r(R,
+c.paramSerializer(c.params));s&&(R=J(R,c.jsonpCallbackParam));n.pendingRequests.push(c);y.then(t,t);!c.cache&&!a.cache||!1===c.cache||"GET"!==c.method&&"JSONP"!==c.method||(O=C(c.cache)?c.cache:C(a.cache)?a.cache:v);O&&(X=O.get(R),u(X)?X&&D(X.then)?X.then(K,K):H(X)?p(X[1],X[0],pa(X[2]),X[3]):p(X,200,{},"OK"):O.put(R,y));w(X)&&((X=yd(c.url)?f()[c.xsrfCookieName||a.xsrfCookieName]:void 0)&&(P[c.xsrfHeaderName||a.xsrfHeaderName]=X),e(c.method,R,d,l,P,c.timeout,c.withCredentials,c.responseType,g(c.eventHandlers),
+g(c.uploadEventHandlers)));return y}function r(a,b){0<b.length&&(a+=(-1===a.indexOf("?")?"?":"&")+b);return a}function J(a,b){if(/[&?][^=]+=JSON_CALLBACK/.test(a))throw oc("badjsonp",a);if((new RegExp("[&?]"+b+"=")).test(a))throw oc("badjsonp",b,a);return a+=(-1===a.indexOf("?")?"?":"&")+b+"=JSON_CALLBACK"}var v=g("$http");a.paramSerializer=F(a.paramSerializer)?l.get(a.paramSerializer):a.paramSerializer;var t=[];q(d,function(a){t.unshift(F(a)?l.get(a):l.invoke(a))});n.pendingRequests=[];(function(a){q(arguments,
+function(a){n[a]=function(b,c){return n(S({},c||{},{method:a,url:b}))}})})("get","delete","head","jsonp");(function(a){q(arguments,function(a){n[a]=function(b,c,d){return n(S({},d||{},{method:a,url:b,data:c}))}})})("post","put","patch");n.defaults=a;return n}]}function Hf(){this.$get=function(){return function(){return new x.XMLHttpRequest}}}function Gf(){this.$get=["$browser","$jsonpCallbacks","$document","$xhrFactory",function(a,b,d,c){return wg(a,c,a.defer,b,d[0])}]}function wg(a,b,d,c,e){function f(a,
+b,d){a=a.replace("JSON_CALLBACK",b);var f=e.createElement("script"),m=null;f.type="text/javascript";f.src=a;f.async=!0;m=function(a){f.removeEventListener("load",m);f.removeEventListener("error",m);e.body.removeChild(f);f=null;var g=-1,r="unknown";a&&("load"!==a.type||c.wasCalled(b)||(a={type:"error"}),r=a.type,g="error"===a.type?404:200);d&&d(g,r)};f.addEventListener("load",m);f.addEventListener("error",m);e.body.appendChild(f);return m}return function(e,h,k,l,m,n,p,r,J,v){function t(){N&&N();A&&
+A.abort()}h=h||a.url();if("jsonp"===Q(e))var M=c.createCallback(h),N=f(h,M,function(a,b){var e=200===a&&c.getResponse(M);u(I)&&d.cancel(I);N=A=null;l(a,e,"",b);c.removeCallback(M)});else{var A=b(e,h);A.open(e,h,!0);q(m,function(a,b){u(a)&&A.setRequestHeader(b,a)});A.onload=function(){var a=A.statusText||"",b="response"in A?A.response:A.responseText,c=1223===A.status?204:A.status;0===c&&(c=b?200:"file"===Ca(h).protocol?404:0);var e=A.getAllResponseHeaders();u(I)&&d.cancel(I);N=A=null;l(c,b,e,a)};e=
+function(){u(I)&&d.cancel(I);N=A=null;l(-1,null,null,"")};A.onerror=e;A.onabort=e;A.ontimeout=e;q(J,function(a,b){A.addEventListener(b,a)});q(v,function(a,b){A.upload.addEventListener(b,a)});p&&(A.withCredentials=!0);if(r)try{A.responseType=r}catch(s){if("json"!==r)throw s;}A.send(w(k)?null:k)}if(0<n)var I=d(t,n);else n&&D(n.then)&&n.then(t)}}function Bf(){var a="{{",b="}}";this.startSymbol=function(b){return b?(a=b,this):a};this.endSymbol=function(a){return a?(b=a,this):b};this.$get=["$parse","$exceptionHandler",
+"$sce",function(d,c,e){function f(a){return"\\\\\\"+a}function g(c){return c.replace(n,a).replace(p,b)}function h(a,b,c,d){var e=a.$watch(function(a){e();return d(a)},b,c);return e}function k(f,k,n,p){function M(a){try{var b=a;a=n?e.getTrusted(n,b):e.valueOf(b);return p&&!u(a)?a:$b(a)}catch(d){c(Da.interr(f,d))}}if(!f.length||-1===f.indexOf(a)){var q;k||(k=g(f),q=la(k),q.exp=f,q.expressions=[],q.$$watchDelegate=h);return q}p=!!p;var A,s,I=0,K=[],E=[];q=f.length;for(var G=[],y=[];I<q;)if(-1!==(A=f.indexOf(a,
+I))&&-1!==(s=f.indexOf(b,A+l)))I!==A&&G.push(g(f.substring(I,A))),I=f.substring(A+l,s),K.push(I),E.push(d(I,M)),I=s+m,y.push(G.length),G.push("");else{I!==q&&G.push(g(f.substring(I)));break}n&&1<G.length&&Da.throwNoconcat(f);if(!k||K.length){var O=function(a){for(var b=0,c=K.length;b<c;b++){if(p&&w(a[b]))return;G[y[b]]=a[b]}return G.join("")};return S(function(a){var b=0,d=K.length,e=Array(d);try{for(;b<d;b++)e[b]=E[b](a);return O(e)}catch(g){c(Da.interr(f,g))}},{exp:f,expressions:K,$$watchDelegate:function(a,
+b){var c;return a.$watchGroup(E,function(d,e){var f=O(d);D(b)&&b.call(this,f,d!==e?c:f,a);c=f})}})}}var l=a.length,m=b.length,n=new RegExp(a.replace(/./g,f),"g"),p=new RegExp(b.replace(/./g,f),"g");k.startSymbol=function(){return a};k.endSymbol=function(){return b};return k}]}function Cf(){this.$get=["$rootScope","$window","$q","$$q","$browser",function(a,b,d,c,e){function f(f,k,l,m){function n(){p?f.apply(null,r):f(t)}var p=4<arguments.length,r=p?va.call(arguments,4):[],J=b.setInterval,v=b.clearInterval,
+t=0,M=u(m)&&!m,q=(M?c:d).defer(),A=q.promise;l=u(l)?l:0;A.$$intervalId=J(function(){M?e.defer(n):a.$evalAsync(n);q.notify(t++);0<l&&t>=l&&(q.resolve(t),v(A.$$intervalId),delete g[A.$$intervalId]);M||a.$apply()},k);g[A.$$intervalId]=q;return A}var g={};f.cancel=function(a){return a&&a.$$intervalId in g?(g[a.$$intervalId].promise.catch(z),g[a.$$intervalId].reject("canceled"),b.clearInterval(a.$$intervalId),delete g[a.$$intervalId],!0):!1};return f}]}function qc(a){a=a.split("/");for(var b=a.length;b--;)a[b]=
+db(a[b]);return a.join("/")}function zd(a,b){var d=Ca(a);b.$$protocol=d.protocol;b.$$host=d.hostname;b.$$port=Z(d.port)||xg[d.protocol]||null}function Ad(a,b){if(yg.test(a))throw kb("badpath",a);var d="/"!==a.charAt(0);d&&(a="/"+a);var c=Ca(a);b.$$path=decodeURIComponent(d&&"/"===c.pathname.charAt(0)?c.pathname.substring(1):c.pathname);b.$$search=Rc(c.search);b.$$hash=decodeURIComponent(c.hash);b.$$path&&"/"!==b.$$path.charAt(0)&&(b.$$path="/"+b.$$path)}function rc(a,b){return a.slice(0,b.length)===
+b}function ka(a,b){if(rc(b,a))return b.substr(a.length)}function Aa(a){var b=a.indexOf("#");return-1===b?a:a.substr(0,b)}function lb(a){return a.replace(/(#.+)|#$/,"$1")}function sc(a,b,d){this.$$html5=!0;d=d||"";zd(a,this);this.$$parse=function(a){var d=ka(b,a);if(!F(d))throw kb("ipthprfx",a,b);Ad(d,this);this.$$path||(this.$$path="/");this.$$compose()};this.$$compose=function(){var a=Zb(this.$$search),d=this.$$hash?"#"+db(this.$$hash):"";this.$$url=qc(this.$$path)+(a?"?"+a:"")+d;this.$$absUrl=b+
+this.$$url.substr(1);this.$$urlUpdatedByLocation=!0};this.$$parseLinkUrl=function(c,e){if(e&&"#"===e[0])return this.hash(e.slice(1)),!0;var f,g;u(f=ka(a,c))?(g=f,g=d&&u(f=ka(d,f))?b+(ka("/",f)||f):a+g):u(f=ka(b,c))?g=b+f:b===c+"/"&&(g=b);g&&this.$$parse(g);return!!g}}function tc(a,b,d){zd(a,this);this.$$parse=function(c){var e=ka(a,c)||ka(b,c),f;w(e)||"#"!==e.charAt(0)?this.$$html5?f=e:(f="",w(e)&&(a=c,this.replace())):(f=ka(d,e),w(f)&&(f=e));Ad(f,this);c=this.$$path;var e=a,g=/^\/[A-Z]:(\/.*)/;rc(f,
+e)&&(f=f.replace(e,""));g.exec(f)||(c=(f=g.exec(c))?f[1]:c);this.$$path=c;this.$$compose()};this.$$compose=function(){var b=Zb(this.$$search),e=this.$$hash?"#"+db(this.$$hash):"";this.$$url=qc(this.$$path)+(b?"?"+b:"")+e;this.$$absUrl=a+(this.$$url?d+this.$$url:"");this.$$urlUpdatedByLocation=!0};this.$$parseLinkUrl=function(b,d){return Aa(a)===Aa(b)?(this.$$parse(b),!0):!1}}function Bd(a,b,d){this.$$html5=!0;tc.apply(this,arguments);this.$$parseLinkUrl=function(c,e){if(e&&"#"===e[0])return this.hash(e.slice(1)),
+!0;var f,g;a===Aa(c)?f=c:(g=ka(b,c))?f=a+d+g:b===c+"/"&&(f=b);f&&this.$$parse(f);return!!f};this.$$compose=function(){var b=Zb(this.$$search),e=this.$$hash?"#"+db(this.$$hash):"";this.$$url=qc(this.$$path)+(b?"?"+b:"")+e;this.$$absUrl=a+d+this.$$url;this.$$urlUpdatedByLocation=!0}}function Jb(a){return function(){return this[a]}}function Cd(a,b){return function(d){if(w(d))return this[a];this[a]=b(d);this.$$compose();return this}}function Jf(){var a="!",b={enabled:!1,requireBase:!0,rewriteLinks:!0};
+this.hashPrefix=function(b){return u(b)?(a=b,this):a};this.html5Mode=function(a){if(Ha(a))return b.enabled=a,this;if(C(a)){Ha(a.enabled)&&(b.enabled=a.enabled);Ha(a.requireBase)&&(b.requireBase=a.requireBase);if(Ha(a.rewriteLinks)||F(a.rewriteLinks))b.rewriteLinks=a.rewriteLinks;return this}return b};this.$get=["$rootScope","$browser","$sniffer","$rootElement","$window",function(d,c,e,f,g){function h(a,b,d){var e=l.url(),f=l.$$state;try{c.url(a,b,d),l.$$state=c.state()}catch(g){throw l.url(e),l.$$state=
+f,g;}}function k(a,b){d.$broadcast("$locationChangeSuccess",l.absUrl(),a,l.$$state,b)}var l,m;m=c.baseHref();var n=c.url(),p;if(b.enabled){if(!m&&b.requireBase)throw kb("nobase");p=n.substring(0,n.indexOf("/",n.indexOf("//")+2))+(m||"/");m=e.history?sc:Bd}else p=Aa(n),m=tc;var r=p.substr(0,Aa(p).lastIndexOf("/")+1);l=new m(p,r,"#"+a);l.$$parseLinkUrl(n,n);l.$$state=c.state();var J=/^\s*(javascript|mailto):/i;f.on("click",function(a){var e=b.rewriteLinks;if(e&&!a.ctrlKey&&!a.metaKey&&!a.shiftKey&&
+2!==a.which&&2!==a.button){for(var h=B(a.target);"a"!==wa(h[0]);)if(h[0]===f[0]||!(h=h.parent())[0])return;if(!F(e)||!w(h.attr(e))){var e=h.prop("href"),k=h.attr("href")||h.attr("xlink:href");C(e)&&"[object SVGAnimatedString]"===e.toString()&&(e=Ca(e.animVal).href);J.test(e)||!e||h.attr("target")||a.isDefaultPrevented()||!l.$$parseLinkUrl(e,k)||(a.preventDefault(),l.absUrl()!==c.url()&&(d.$apply(),g.angular["ff-684208-preventDefault"]=!0))}}});lb(l.absUrl())!==lb(n)&&c.url(l.absUrl(),!0);var v=!0;
+c.onUrlChange(function(a,b){rc(a,r)?(d.$evalAsync(function(){var c=l.absUrl(),e=l.$$state,f;a=lb(a);l.$$parse(a);l.$$state=b;f=d.$broadcast("$locationChangeStart",a,c,b,e).defaultPrevented;l.absUrl()===a&&(f?(l.$$parse(c),l.$$state=e,h(c,!1,e)):(v=!1,k(c,e)))}),d.$$phase||d.$digest()):g.location.href=a});d.$watch(function(){if(v||l.$$urlUpdatedByLocation){l.$$urlUpdatedByLocation=!1;var a=lb(c.url()),b=lb(l.absUrl()),f=c.state(),g=l.$$replace,m=a!==b||l.$$html5&&e.history&&f!==l.$$state;if(v||m)v=
+!1,d.$evalAsync(function(){var b=l.absUrl(),c=d.$broadcast("$locationChangeStart",b,a,l.$$state,f).defaultPrevented;l.absUrl()===b&&(c?(l.$$parse(a),l.$$state=f):(m&&h(b,g,f===l.$$state?null:l.$$state),k(a,f)))})}l.$$replace=!1});return l}]}function Kf(){var a=!0,b=this;this.debugEnabled=function(b){return u(b)?(a=b,this):a};this.$get=["$window",function(d){function c(a){a instanceof Error&&(a.stack&&f?a=a.message&&-1===a.stack.indexOf(a.message)?"Error: "+a.message+"\n"+a.stack:a.stack:a.sourceURL&&
+(a=a.message+"\n"+a.sourceURL+":"+a.line));return a}function e(a){var b=d.console||{},e=b[a]||b.log||z;a=!1;try{a=!!e.apply}catch(f){}return a?function(){var a=[];q(arguments,function(b){a.push(c(b))});return e.apply(b,a)}:function(a,b){e(a,null==b?"":b)}}var f=za||/\bEdge\//.test(d.navigator&&d.navigator.userAgent);return{log:e("log"),info:e("info"),warn:e("warn"),error:e("error"),debug:function(){var c=e("debug");return function(){a&&c.apply(b,arguments)}}()}}]}function zg(a){return a+""}function Ag(a,
+b){return"undefined"!==typeof a?a:b}function Dd(a,b){return"undefined"===typeof a?b:"undefined"===typeof b?a:a+b}function U(a,b){var d,c,e;switch(a.type){case s.Program:d=!0;q(a.body,function(a){U(a.expression,b);d=d&&a.expression.constant});a.constant=d;break;case s.Literal:a.constant=!0;a.toWatch=[];break;case s.UnaryExpression:U(a.argument,b);a.constant=a.argument.constant;a.toWatch=a.argument.toWatch;break;case s.BinaryExpression:U(a.left,b);U(a.right,b);a.constant=a.left.constant&&a.right.constant;
+a.toWatch=a.left.toWatch.concat(a.right.toWatch);break;case s.LogicalExpression:U(a.left,b);U(a.right,b);a.constant=a.left.constant&&a.right.constant;a.toWatch=a.constant?[]:[a];break;case s.ConditionalExpression:U(a.test,b);U(a.alternate,b);U(a.consequent,b);a.constant=a.test.constant&&a.alternate.constant&&a.consequent.constant;a.toWatch=a.constant?[]:[a];break;case s.Identifier:a.constant=!1;a.toWatch=[a];break;case s.MemberExpression:U(a.object,b);a.computed&&U(a.property,b);a.constant=a.object.constant&&
+(!a.computed||a.property.constant);a.toWatch=[a];break;case s.CallExpression:d=e=a.filter?!b(a.callee.name).$stateful:!1;c=[];q(a.arguments,function(a){U(a,b);d=d&&a.constant;a.constant||c.push.apply(c,a.toWatch)});a.constant=d;a.toWatch=e?c:[a];break;case s.AssignmentExpression:U(a.left,b);U(a.right,b);a.constant=a.left.constant&&a.right.constant;a.toWatch=[a];break;case s.ArrayExpression:d=!0;c=[];q(a.elements,function(a){U(a,b);d=d&&a.constant;a.constant||c.push.apply(c,a.toWatch)});a.constant=
+d;a.toWatch=c;break;case s.ObjectExpression:d=!0;c=[];q(a.properties,function(a){U(a.value,b);d=d&&a.value.constant&&!a.computed;a.value.constant||c.push.apply(c,a.value.toWatch);a.computed&&(U(a.key,b),a.key.constant||c.push.apply(c,a.key.toWatch))});a.constant=d;a.toWatch=c;break;case s.ThisExpression:a.constant=!1;a.toWatch=[];break;case s.LocalsExpression:a.constant=!1,a.toWatch=[]}}function Ed(a){if(1===a.length){a=a[0].expression;var b=a.toWatch;return 1!==b.length?b:b[0]!==a?b:void 0}}function Fd(a){return a.type===
+s.Identifier||a.type===s.MemberExpression}function Gd(a){if(1===a.body.length&&Fd(a.body[0].expression))return{type:s.AssignmentExpression,left:a.body[0].expression,right:{type:s.NGValueParameter},operator:"="}}function Hd(a){this.$filter=a}function Id(a){this.$filter=a}function uc(a,b,d){this.ast=new s(a,d);this.astCompiler=d.csp?new Id(b):new Hd(b)}function vc(a){return D(a.valueOf)?a.valueOf():Bg.call(a)}function Lf(){var a=V(),b={"true":!0,"false":!1,"null":null,undefined:void 0},d,c;this.addLiteral=
+function(a,c){b[a]=c};this.setIdentifierFns=function(a,b){d=a;c=b;return this};this.$get=["$filter",function(e){function f(a,b,c){return null==a||null==b?a===b:"object"!==typeof a||(a=vc(a),"object"!==typeof a||c)?a===b||a!==a&&b!==b:!1}function g(a,b,c,d,e){var g=d.inputs,h;if(1===g.length){var k=f,g=g[0];return a.$watch(function(a){var b=g(a);f(b,k,d.literal)||(h=d(a,void 0,void 0,[b]),k=b&&vc(b));return h},b,c,e)}for(var l=[],m=[],n=0,E=g.length;n<E;n++)l[n]=f,m[n]=null;return a.$watch(function(a){for(var b=
+!1,c=0,e=g.length;c<e;c++){var k=g[c](a);if(b||(b=!f(k,l[c],d.literal)))m[c]=k,l[c]=k&&vc(k)}b&&(h=d(a,void 0,void 0,m));return h},b,c,e)}function h(a,b,c,d,e){function f(a){return d(a)}function h(a,c,d){n=a;D(b)&&b(a,c,d);l(a)&&d.$$postDigest(function(){l(n)&&m()})}var l=d.literal?k:u,m,n;return m=d.inputs?g(a,h,c,d,e):a.$watch(f,h,c)}function k(a){var b=!0;q(a,function(a){u(a)||(b=!1)});return b}function l(a,b,c,d){var e=a.$watch(function(a){e();return d(a)},b,c);return e}function m(a,b){function c(d,
+e,g,h){g=f&&h?h[0]:a(d,e,g,h);return b(g,d,e)}function d(c,e,g,k){g=f&&k?k[0]:a(c,e,g,k);c=b(g,c,e);return h(g)?c:g}if(!b)return a;var e=a.$$watchDelegate,f=!1,h=a.literal?k:u,l=a.oneTime?d:c;l.literal=a.literal;l.oneTime=a.oneTime;f=!a.inputs;e&&e!==g?(l.$$watchDelegate=e,l.inputs=a.inputs):b.$stateful||(l.$$watchDelegate=g,l.inputs=a.inputs?a.inputs:[a]);return l}var n={csp:Ga().noUnsafeEval,literals:ra(b),isIdentifierStart:D(d)&&d,isIdentifierContinue:D(c)&&c};return function(b,c){var d,f,k;switch(typeof b){case "string":return k=
+b=b.trim(),d=a[k],d||(":"===b.charAt(0)&&":"===b.charAt(1)&&(f=!0,b=b.substring(2)),d=new wc(n),d=(new uc(d,e,n)).parse(b),d.constant?d.$$watchDelegate=l:f?(d.oneTime=!0,d.$$watchDelegate=h):d.inputs&&(d.$$watchDelegate=g),a[k]=d),m(d,c);case "function":return m(b,c);default:return m(z,c)}}}]}function Nf(){var a=!0;this.$get=["$rootScope","$exceptionHandler",function(b,d){return Jd(function(a){b.$evalAsync(a)},d,a)}];this.errorOnUnhandledRejections=function(b){return u(b)?(a=b,this):a}}function Of(){var a=
+!0;this.$get=["$browser","$exceptionHandler",function(b,d){return Jd(function(a){b.defer(a)},d,a)}];this.errorOnUnhandledRejections=function(b){return u(b)?(a=b,this):a}}function Jd(a,b,d){function c(){return new e}function e(){var a=this.promise=new f;this.resolve=function(b){k(a,b)};this.reject=function(b){m(a,b)};this.notify=function(b){p(a,b)}}function f(){this.$$state={status:0}}function g(){for(;!s&&A.length;){var a=A.shift();if(!a.pur){a.pur=!0;var c=a.value,c="Possibly unhandled rejection: "+
+("function"===typeof c?c.toString().replace(/ \{[\s\S]*$/,""):w(c)?"undefined":"string"!==typeof c?Be(c,void 0):c);a.value instanceof Error?b(a.value,c):b(c)}}}function h(b){!d||b.pending||2!==b.status||b.pur||(0===s&&0===A.length&&a(g),A.push(b));!b.processScheduled&&b.pending&&(b.processScheduled=!0,++s,a(function(){var c,e,f;f=b.pending;b.processScheduled=!1;b.pending=void 0;try{for(var h=0,l=f.length;h<l;++h){b.pur=!0;e=f[h][0];c=f[h][b.status];try{D(c)?k(e,c(b.value)):1===b.status?k(e,b.value):
+m(e,b.value)}catch(n){m(e,n)}}}finally{--s,d&&0===s&&a(g)}}))}function k(a,b){a.$$state.status||(b===a?n(a,M("qcycle",b)):l(a,b))}function l(a,b){function c(b){g||(g=!0,l(a,b))}function d(b){g||(g=!0,n(a,b))}function e(b){p(a,b)}var f,g=!1;try{if(C(b)||D(b))f=b.then;D(f)?(a.$$state.status=-1,f.call(b,c,d,e)):(a.$$state.value=b,a.$$state.status=1,h(a.$$state))}catch(k){d(k)}}function m(a,b){a.$$state.status||n(a,b)}function n(a,b){a.$$state.value=b;a.$$state.status=2;h(a.$$state)}function p(c,d){var e=
+c.$$state.pending;0>=c.$$state.status&&e&&e.length&&a(function(){for(var a,c,f=0,g=e.length;f<g;f++){c=e[f][0];a=e[f][3];try{p(c,D(a)?a(d):d)}catch(h){b(h)}}})}function r(a){var b=new f;m(b,a);return b}function J(a,b,c){var d=null;try{D(c)&&(d=c())}catch(e){return r(e)}return d&&D(d.then)?d.then(function(){return b(a)},r):b(a)}function v(a,b,c,d){var e=new f;k(e,a);return e.then(b,c,d)}function t(a){if(!D(a))throw M("norslvr",a);var b=new f;a(function(a){k(b,a)},function(a){m(b,a)});return b}var M=
+L("$q",TypeError),s=0,A=[];S(f.prototype,{then:function(a,b,c){if(w(a)&&w(b)&&w(c))return this;var d=new f;this.$$state.pending=this.$$state.pending||[];this.$$state.pending.push([d,a,b,c]);0<this.$$state.status&&h(this.$$state);return d},"catch":function(a){return this.then(null,a)},"finally":function(a,b){return this.then(function(b){return J(b,u,a)},function(b){return J(b,r,a)},b)}});var u=v;t.prototype=f.prototype;t.defer=c;t.reject=r;t.when=v;t.resolve=u;t.all=function(a){var b=new f,c=0,d=H(a)?
+[]:{};q(a,function(a,e){c++;v(a).then(function(a){d[e]=a;--c||k(b,d)},function(a){m(b,a)})});0===c&&k(b,d);return b};t.race=function(a){var b=c();q(a,function(a){v(a).then(b.resolve,b.reject)});return b.promise};return t}function Xf(){this.$get=["$window","$timeout",function(a,b){var d=a.requestAnimationFrame||a.webkitRequestAnimationFrame,c=a.cancelAnimationFrame||a.webkitCancelAnimationFrame||a.webkitCancelRequestAnimationFrame,e=!!d,f=e?function(a){var b=d(a);return function(){c(b)}}:function(a){var c=
+b(a,16.66,!1);return function(){b.cancel(c)}};f.supported=e;return f}]}function Mf(){function a(a){function b(){this.$$watchers=this.$$nextSibling=this.$$childHead=this.$$childTail=null;this.$$listeners={};this.$$listenerCount={};this.$$watchersCount=0;this.$id=++qb;this.$$ChildScope=null}b.prototype=a;return b}var b=10,d=L("$rootScope"),c=null,e=null;this.digestTtl=function(a){arguments.length&&(b=a);return b};this.$get=["$exceptionHandler","$parse","$browser",function(f,g,h){function k(a){a.currentScope.$$destroyed=
+!0}function l(a){9===za&&(a.$$childHead&&l(a.$$childHead),a.$$nextSibling&&l(a.$$nextSibling));a.$parent=a.$$nextSibling=a.$$prevSibling=a.$$childHead=a.$$childTail=a.$root=a.$$watchers=null}function m(){this.$id=++qb;this.$$phase=this.$parent=this.$$watchers=this.$$nextSibling=this.$$prevSibling=this.$$childHead=this.$$childTail=null;this.$root=this;this.$$destroyed=!1;this.$$listeners={};this.$$listenerCount={};this.$$watchersCount=0;this.$$isolateBindings=null}function n(a){if(M.$$phase)throw d("inprog",
+M.$$phase);M.$$phase=a}function p(a,b){do a.$$watchersCount+=b;while(a=a.$parent)}function r(a,b,c){do a.$$listenerCount[c]-=b,0===a.$$listenerCount[c]&&delete a.$$listenerCount[c];while(a=a.$parent)}function J(){}function v(){for(;u.length;)try{u.shift()()}catch(a){f(a)}e=null}function t(){null===e&&(e=h.defer(function(){M.$apply(v)}))}m.prototype={constructor:m,$new:function(b,c){var d;c=c||this;b?(d=new m,d.$root=this.$root):(this.$$ChildScope||(this.$$ChildScope=a(this)),d=new this.$$ChildScope);
+d.$parent=c;d.$$prevSibling=c.$$childTail;c.$$childHead?(c.$$childTail.$$nextSibling=d,c.$$childTail=d):c.$$childHead=c.$$childTail=d;(b||c!==this)&&d.$on("$destroy",k);return d},$watch:function(a,b,d,e){var f=g(a);if(f.$$watchDelegate)return f.$$watchDelegate(this,b,d,f,a);var h=this,k=h.$$watchers,l={fn:b,last:J,get:f,exp:e||a,eq:!!d};c=null;D(b)||(l.fn=z);k||(k=h.$$watchers=[],k.$$digestWatchIndex=-1);k.unshift(l);k.$$digestWatchIndex++;p(this,1);return function(){var a=$a(k,l);0<=a&&(p(h,-1),
+a<k.$$digestWatchIndex&&k.$$digestWatchIndex--);c=null}},$watchGroup:function(a,b){function c(){h=!1;k?(k=!1,b(e,e,g)):b(e,d,g)}var d=Array(a.length),e=Array(a.length),f=[],g=this,h=!1,k=!0;if(!a.length){var l=!0;g.$evalAsync(function(){l&&b(e,e,g)});return function(){l=!1}}if(1===a.length)return this.$watch(a[0],function(a,c,f){e[0]=a;d[0]=c;b(e,a===c?e:d,f)});q(a,function(a,b){var k=g.$watch(a,function(a,f){e[b]=a;d[b]=f;h||(h=!0,g.$evalAsync(c))});f.push(k)});return function(){for(;f.length;)f.shift()()}},
+$watchCollection:function(a,b){function c(a){e=a;var b,d,g,h;if(!w(e)){if(C(e))if(qa(e))for(f!==n&&(f=n,t=f.length=0,l++),a=e.length,t!==a&&(l++,f.length=t=a),b=0;b<a;b++)h=f[b],g=e[b],d=h!==h&&g!==g,d||h===g||(l++,f[b]=g);else{f!==p&&(f=p={},t=0,l++);a=0;for(b in e)ua.call(e,b)&&(a++,g=e[b],h=f[b],b in f?(d=h!==h&&g!==g,d||h===g||(l++,f[b]=g)):(t++,f[b]=g,l++));if(t>a)for(b in l++,f)ua.call(e,b)||(t--,delete f[b])}else f!==e&&(f=e,l++);return l}}c.$stateful=!0;var d=this,e,f,h,k=1<b.length,l=0,m=
+g(a,c),n=[],p={},r=!0,t=0;return this.$watch(m,function(){r?(r=!1,b(e,e,d)):b(e,h,d);if(k)if(C(e))if(qa(e)){h=Array(e.length);for(var a=0;a<e.length;a++)h[a]=e[a]}else for(a in h={},e)ua.call(e,a)&&(h[a]=e[a]);else h=e})},$digest:function(){var a,g,k,l,m,p,r,t=b,q,u=[],w,x;n("$digest");h.$$checkUrlChange();this===M&&null!==e&&(h.defer.cancel(e),v());c=null;do{r=!1;q=this;for(p=0;p<s.length;p++){try{x=s[p],l=x.fn,l(x.scope,x.locals)}catch(z){f(z)}c=null}s.length=0;a:do{if(p=q.$$watchers)for(p.$$digestWatchIndex=
+p.length;p.$$digestWatchIndex--;)try{if(a=p[p.$$digestWatchIndex])if(m=a.get,(g=m(q))!==(k=a.last)&&!(a.eq?sa(g,k):da(g)&&da(k)))r=!0,c=a,a.last=a.eq?ra(g,null):g,l=a.fn,l(g,k===J?g:k,q),5>t&&(w=4-t,u[w]||(u[w]=[]),u[w].push({msg:D(a.exp)?"fn: "+(a.exp.name||a.exp.toString()):a.exp,newVal:g,oldVal:k}));else if(a===c){r=!1;break a}}catch(B){f(B)}if(!(p=q.$$watchersCount&&q.$$childHead||q!==this&&q.$$nextSibling))for(;q!==this&&!(p=q.$$nextSibling);)q=q.$parent}while(q=p);if((r||s.length)&&!t--)throw M.$$phase=
+null,d("infdig",b,u);}while(r||s.length);for(M.$$phase=null;I<A.length;)try{A[I++]()}catch(F){f(F)}A.length=I=0;h.$$checkUrlChange()},$destroy:function(){if(!this.$$destroyed){var a=this.$parent;this.$broadcast("$destroy");this.$$destroyed=!0;this===M&&h.$$applicationDestroyed();p(this,-this.$$watchersCount);for(var b in this.$$listenerCount)r(this,this.$$listenerCount[b],b);a&&a.$$childHead===this&&(a.$$childHead=this.$$nextSibling);a&&a.$$childTail===this&&(a.$$childTail=this.$$prevSibling);this.$$prevSibling&&
+(this.$$prevSibling.$$nextSibling=this.$$nextSibling);this.$$nextSibling&&(this.$$nextSibling.$$prevSibling=this.$$prevSibling);this.$destroy=this.$digest=this.$apply=this.$evalAsync=this.$applyAsync=z;this.$on=this.$watch=this.$watchGroup=function(){return z};this.$$listeners={};this.$$nextSibling=null;l(this)}},$eval:function(a,b){return g(a)(this,b)},$evalAsync:function(a,b){M.$$phase||s.length||h.defer(function(){s.length&&M.$digest()});s.push({scope:this,fn:g(a),locals:b})},$$postDigest:function(a){A.push(a)},
+$apply:function(a){try{n("$apply");try{return this.$eval(a)}finally{M.$$phase=null}}catch(b){f(b)}finally{try{M.$digest()}catch(c){throw f(c),c;}}},$applyAsync:function(a){function b(){c.$eval(a)}var c=this;a&&u.push(b);a=g(a);t()},$on:function(a,b){var c=this.$$listeners[a];c||(this.$$listeners[a]=c=[]);c.push(b);var d=this;do d.$$listenerCount[a]||(d.$$listenerCount[a]=0),d.$$listenerCount[a]++;while(d=d.$parent);var e=this;return function(){var d=c.indexOf(b);-1!==d&&(c[d]=null,r(e,1,a))}},$emit:function(a,
+b){var c=[],d,e=this,g=!1,h={name:a,targetScope:e,stopPropagation:function(){g=!0},preventDefault:function(){h.defaultPrevented=!0},defaultPrevented:!1},k=ab([h],arguments,1),l,m;do{d=e.$$listeners[a]||c;h.currentScope=e;l=0;for(m=d.length;l<m;l++)if(d[l])try{d[l].apply(null,k)}catch(n){f(n)}else d.splice(l,1),l--,m--;if(g)return h.currentScope=null,h;e=e.$parent}while(e);h.currentScope=null;return h},$broadcast:function(a,b){var c=this,d=this,e={name:a,targetScope:this,preventDefault:function(){e.defaultPrevented=
+!0},defaultPrevented:!1};if(!this.$$listenerCount[a])return e;for(var g=ab([e],arguments,1),h,k;c=d;){e.currentScope=c;d=c.$$listeners[a]||[];h=0;for(k=d.length;h<k;h++)if(d[h])try{d[h].apply(null,g)}catch(l){f(l)}else d.splice(h,1),h--,k--;if(!(d=c.$$listenerCount[a]&&c.$$childHead||c!==this&&c.$$nextSibling))for(;c!==this&&!(d=c.$$nextSibling);)c=c.$parent}e.currentScope=null;return e}};var M=new m,s=M.$$asyncQueue=[],A=M.$$postDigestQueue=[],u=M.$$applyAsyncQueue=[],I=0;return M}]}function Ee(){var a=
+/^\s*(https?|ftp|mailto|tel|file):/,b=/^\s*((https?|ftp|file|blob):|data:image\/)/;this.aHrefSanitizationWhitelist=function(b){return u(b)?(a=b,this):a};this.imgSrcSanitizationWhitelist=function(a){return u(a)?(b=a,this):b};this.$get=function(){return function(d,c){var e=c?b:a,f;f=Ca(d).href;return""===f||f.match(e)?d:"unsafe:"+f}}}function Cg(a){if("self"===a)return a;if(F(a)){if(-1<a.indexOf("***"))throw ta("iwcard",a);a=Kd(a).replace(/\\\*\\\*/g,".*").replace(/\\\*/g,"[^:/.?&;]*");return new RegExp("^"+
+a+"$")}if(Xa(a))return new RegExp("^"+a.source+"$");throw ta("imatcher");}function Ld(a){var b=[];u(a)&&q(a,function(a){b.push(Cg(a))});return b}function Qf(){this.SCE_CONTEXTS=oa;var a=["self"],b=[];this.resourceUrlWhitelist=function(b){arguments.length&&(a=Ld(b));return a};this.resourceUrlBlacklist=function(a){arguments.length&&(b=Ld(a));return b};this.$get=["$injector",function(d){function c(a,b){return"self"===a?yd(b):!!a.exec(b.href)}function e(a){var b=function(a){this.$$unwrapTrustedValue=
+function(){return a}};a&&(b.prototype=new a);b.prototype.valueOf=function(){return this.$$unwrapTrustedValue()};b.prototype.toString=function(){return this.$$unwrapTrustedValue().toString()};return b}var f=function(a){throw ta("unsafe");};d.has("$sanitize")&&(f=d.get("$sanitize"));var g=e(),h={};h[oa.HTML]=e(g);h[oa.CSS]=e(g);h[oa.URL]=e(g);h[oa.JS]=e(g);h[oa.RESOURCE_URL]=e(h[oa.URL]);return{trustAs:function(a,b){var c=h.hasOwnProperty(a)?h[a]:null;if(!c)throw ta("icontext",a,b);if(null===b||w(b)||
+""===b)return b;if("string"!==typeof b)throw ta("itype",a);return new c(b)},getTrusted:function(d,e){if(null===e||w(e)||""===e)return e;var g=h.hasOwnProperty(d)?h[d]:null;if(g&&e instanceof g)return e.$$unwrapTrustedValue();if(d===oa.RESOURCE_URL){var g=Ca(e.toString()),n,p,r=!1;n=0;for(p=a.length;n<p;n++)if(c(a[n],g)){r=!0;break}if(r)for(n=0,p=b.length;n<p;n++)if(c(b[n],g)){r=!1;break}if(r)return e;throw ta("insecurl",e.toString());}if(d===oa.HTML)return f(e);throw ta("unsafe");},valueOf:function(a){return a instanceof
+g?a.$$unwrapTrustedValue():a}}}]}function Pf(){var a=!0;this.enabled=function(b){arguments.length&&(a=!!b);return a};this.$get=["$parse","$sceDelegate",function(b,d){if(a&&8>za)throw ta("iequirks");var c=pa(oa);c.isEnabled=function(){return a};c.trustAs=d.trustAs;c.getTrusted=d.getTrusted;c.valueOf=d.valueOf;a||(c.trustAs=c.getTrusted=function(a,b){return b},c.valueOf=Ya);c.parseAs=function(a,d){var e=b(d);return e.literal&&e.constant?e:b(d,function(b){return c.getTrusted(a,b)})};var e=c.parseAs,
+f=c.getTrusted,g=c.trustAs;q(oa,function(a,b){var d=Q(b);c[("parse_as_"+d).replace(xc,gb)]=function(b){return e(a,b)};c[("get_trusted_"+d).replace(xc,gb)]=function(b){return f(a,b)};c[("trust_as_"+d).replace(xc,gb)]=function(b){return g(a,b)}});return c}]}function Rf(){this.$get=["$window","$document",function(a,b){var d={},c=!((!a.nw||!a.nw.process)&&a.chrome&&(a.chrome.app&&a.chrome.app.runtime||!a.chrome.app&&a.chrome.runtime&&a.chrome.runtime.id))&&a.history&&a.history.pushState,e=Z((/android (\d+)/.exec(Q((a.navigator||
+{}).userAgent))||[])[1]),f=/Boxee/i.test((a.navigator||{}).userAgent),g=b[0]||{},h=g.body&&g.body.style,k=!1,l=!1;h&&(k=!!("transition"in h||"webkitTransition"in h),l=!!("animation"in h||"webkitAnimation"in h));return{history:!(!c||4>e||f),hasEvent:function(a){if("input"===a&&za)return!1;if(w(d[a])){var b=g.createElement("div");d[a]="on"+a in b}return d[a]},csp:Ga(),transitions:k,animations:l,android:e}}]}function Tf(){var a;this.httpOptions=function(b){return b?(a=b,this):a};this.$get=["$exceptionHandler",
+"$templateCache","$http","$q","$sce",function(b,d,c,e,f){function g(h,k){g.totalPendingRequests++;if(!F(h)||w(d.get(h)))h=f.getTrustedResourceUrl(h);var l=c.defaults&&c.defaults.transformResponse;H(l)?l=l.filter(function(a){return a!==nc}):l===nc&&(l=null);return c.get(h,S({cache:d,transformResponse:l},a)).finally(function(){g.totalPendingRequests--}).then(function(a){d.put(h,a.data);return a.data},function(a){k||(a=Dg("tpload",h,a.status,a.statusText),b(a));return e.reject(a)})}g.totalPendingRequests=
+0;return g}]}function Uf(){this.$get=["$rootScope","$browser","$location",function(a,b,d){return{findBindings:function(a,b,d){a=a.getElementsByClassName("ng-binding");var g=[];q(a,function(a){var c=ea.element(a).data("$binding");c&&q(c,function(c){d?(new RegExp("(^|\\s)"+Kd(b)+"(\\s|\\||$)")).test(c)&&g.push(a):-1!==c.indexOf(b)&&g.push(a)})});return g},findModels:function(a,b,d){for(var g=["ng-","data-ng-","ng\\:"],h=0;h<g.length;++h){var k=a.querySelectorAll("["+g[h]+"model"+(d?"=":"*=")+'"'+b+
+'"]');if(k.length)return k}},getLocation:function(){return d.url()},setLocation:function(b){b!==d.url()&&(d.url(b),a.$digest())},whenStable:function(a){b.notifyWhenNoOutstandingRequests(a)}}}]}function Vf(){this.$get=["$rootScope","$browser","$q","$$q","$exceptionHandler",function(a,b,d,c,e){function f(f,k,l){D(f)||(l=k,k=f,f=z);var m=va.call(arguments,3),n=u(l)&&!l,p=(n?c:d).defer(),r=p.promise,q;q=b.defer(function(){try{p.resolve(f.apply(null,m))}catch(b){p.reject(b),e(b)}finally{delete g[r.$$timeoutId]}n||
+a.$apply()},k);r.$$timeoutId=q;g[q]=p;return r}var g={};f.cancel=function(a){return a&&a.$$timeoutId in g?(g[a.$$timeoutId].promise.catch(z),g[a.$$timeoutId].reject("canceled"),delete g[a.$$timeoutId],b.defer.cancel(a.$$timeoutId)):!1};return f}]}function Ca(a){za&&(aa.setAttribute("href",a),a=aa.href);aa.setAttribute("href",a);return{href:aa.href,protocol:aa.protocol?aa.protocol.replace(/:$/,""):"",host:aa.host,search:aa.search?aa.search.replace(/^\?/,""):"",hash:aa.hash?aa.hash.replace(/^#/,""):
+"",hostname:aa.hostname,port:aa.port,pathname:"/"===aa.pathname.charAt(0)?aa.pathname:"/"+aa.pathname}}function yd(a){a=F(a)?Ca(a):a;return a.protocol===Md.protocol&&a.host===Md.host}function Wf(){this.$get=la(x)}function Nd(a){function b(a){try{return decodeURIComponent(a)}catch(b){return a}}var d=a[0]||{},c={},e="";return function(){var a,g,h,k,l;try{a=d.cookie||""}catch(m){a=""}if(a!==e)for(e=a,a=e.split("; "),c={},h=0;h<a.length;h++)g=a[h],k=g.indexOf("="),0<k&&(l=b(g.substring(0,k)),w(c[l])&&
+(c[l]=b(g.substring(k+1))));return c}}function $f(){this.$get=Nd}function cd(a){function b(d,c){if(C(d)){var e={};q(d,function(a,c){e[c]=b(c,a)});return e}return a.factory(d+"Filter",c)}this.register=b;this.$get=["$injector",function(a){return function(b){return a.get(b+"Filter")}}];b("currency",Od);b("date",Pd);b("filter",Eg);b("json",Fg);b("limitTo",Gg);b("lowercase",Hg);b("number",Qd);b("orderBy",Rd);b("uppercase",Ig)}function Eg(){return function(a,b,d,c){if(!qa(a)){if(null==a)return a;throw L("filter")("notarray",
+a);}c=c||"$";var e;switch(yc(b)){case "function":break;case "boolean":case "null":case "number":case "string":e=!0;case "object":b=Jg(b,d,c,e);break;default:return a}return Array.prototype.filter.call(a,b)}}function Jg(a,b,d,c){var e=C(a)&&d in a;!0===b?b=sa:D(b)||(b=function(a,b){if(w(a))return!1;if(null===a||null===b)return a===b;if(C(b)||C(a)&&!Wb(a))return!1;a=Q(""+a);b=Q(""+b);return-1!==a.indexOf(b)});return function(f){return e&&!C(f)?Ea(f,a[d],b,d,!1):Ea(f,a,b,d,c)}}function Ea(a,b,d,c,e,
+f){var g=yc(a),h=yc(b);if("string"===h&&"!"===b.charAt(0))return!Ea(a,b.substring(1),d,c,e);if(H(a))return a.some(function(a){return Ea(a,b,d,c,e)});switch(g){case "object":var k;if(e){for(k in a)if(k.charAt&&"$"!==k.charAt(0)&&Ea(a[k],b,d,c,!0))return!0;return f?!1:Ea(a,b,d,c,!1)}if("object"===h){for(k in b)if(f=b[k],!D(f)&&!w(f)&&(g=k===c,!Ea(g?a:a[k],f,d,c,g,g)))return!1;return!0}return d(a,b);case "function":return!1;default:return d(a,b)}}function yc(a){return null===a?"null":typeof a}function Od(a){var b=
+a.NUMBER_FORMATS;return function(a,c,e){w(c)&&(c=b.CURRENCY_SYM);w(e)&&(e=b.PATTERNS[1].maxFrac);return null==a?a:Sd(a,b.PATTERNS[1],b.GROUP_SEP,b.DECIMAL_SEP,e).replace(/\u00A4/g,c)}}function Qd(a){var b=a.NUMBER_FORMATS;return function(a,c){return null==a?a:Sd(a,b.PATTERNS[0],b.GROUP_SEP,b.DECIMAL_SEP,c)}}function Kg(a){var b=0,d,c,e,f,g;-1<(c=a.indexOf(Td))&&(a=a.replace(Td,""));0<(e=a.search(/e/i))?(0>c&&(c=e),c+=+a.slice(e+1),a=a.substring(0,e)):0>c&&(c=a.length);for(e=0;a.charAt(e)===zc;e++);
+if(e===(g=a.length))d=[0],c=1;else{for(g--;a.charAt(g)===zc;)g--;c-=e;d=[];for(f=0;e<=g;e++,f++)d[f]=+a.charAt(e)}c>Ud&&(d=d.splice(0,Ud-1),b=c-1,c=1);return{d:d,e:b,i:c}}function Lg(a,b,d,c){var e=a.d,f=e.length-a.i;b=w(b)?Math.min(Math.max(d,f),c):+b;d=b+a.i;c=e[d];if(0<d){e.splice(Math.max(a.i,d));for(var g=d;g<e.length;g++)e[g]=0}else for(f=Math.max(0,f),a.i=1,e.length=Math.max(1,d=b+1),e[0]=0,g=1;g<d;g++)e[g]=0;if(5<=c)if(0>d-1){for(c=0;c>d;c--)e.unshift(0),a.i++;e.unshift(1);a.i++}else e[d-
+1]++;for(;f<Math.max(0,b);f++)e.push(0);if(b=e.reduceRight(function(a,b,c,d){b+=a;d[c]=b%10;return Math.floor(b/10)},0))e.unshift(b),a.i++}function Sd(a,b,d,c,e){if(!F(a)&&!ba(a)||isNaN(a))return"";var f=!isFinite(a),g=!1,h=Math.abs(a)+"",k="";if(f)k="\u221e";else{g=Kg(h);Lg(g,e,b.minFrac,b.maxFrac);k=g.d;h=g.i;e=g.e;f=[];for(g=k.reduce(function(a,b){return a&&!b},!0);0>h;)k.unshift(0),h++;0<h?f=k.splice(h,k.length):(f=k,k=[0]);h=[];for(k.length>=b.lgSize&&h.unshift(k.splice(-b.lgSize,k.length).join(""));k.length>
+b.gSize;)h.unshift(k.splice(-b.gSize,k.length).join(""));k.length&&h.unshift(k.join(""));k=h.join(d);f.length&&(k+=c+f.join(""));e&&(k+="e+"+e)}return 0>a&&!g?b.negPre+k+b.negSuf:b.posPre+k+b.posSuf}function Kb(a,b,d,c){var e="";if(0>a||c&&0>=a)c?a=-a+1:(a=-a,e="-");for(a=""+a;a.length<b;)a=zc+a;d&&(a=a.substr(a.length-b));return e+a}function Y(a,b,d,c,e){d=d||0;return function(f){f=f["get"+a]();if(0<d||f>-d)f+=d;0===f&&-12===d&&(f=12);return Kb(f,b,c,e)}}function mb(a,b,d){return function(c,e){var f=
+c["get"+a](),g=ub((d?"STANDALONE":"")+(b?"SHORT":"")+a);return e[g][f]}}function Vd(a){var b=(new Date(a,0,1)).getDay();return new Date(a,0,(4>=b?5:12)-b)}function Wd(a){return function(b){var d=Vd(b.getFullYear());b=+new Date(b.getFullYear(),b.getMonth(),b.getDate()+(4-b.getDay()))-+d;b=1+Math.round(b/6048E5);return Kb(b,a)}}function Ac(a,b){return 0>=a.getFullYear()?b.ERAS[0]:b.ERAS[1]}function Pd(a){function b(a){var b;if(b=a.match(d)){a=new Date(0);var f=0,g=0,h=b[8]?a.setUTCFullYear:a.setFullYear,
+k=b[8]?a.setUTCHours:a.setHours;b[9]&&(f=Z(b[9]+b[10]),g=Z(b[9]+b[11]));h.call(a,Z(b[1]),Z(b[2])-1,Z(b[3]));f=Z(b[4]||0)-f;g=Z(b[5]||0)-g;h=Z(b[6]||0);b=Math.round(1E3*parseFloat("0."+(b[7]||0)));k.call(a,f,g,h,b)}return a}var d=/^(\d{4})-?(\d\d)-?(\d\d)(?:T(\d\d)(?::?(\d\d)(?::?(\d\d)(?:\.(\d+))?)?)?(Z|([+-])(\d\d):?(\d\d))?)?$/;return function(c,d,f){var g="",h=[],k,l;d=d||"mediumDate";d=a.DATETIME_FORMATS[d]||d;F(c)&&(c=Mg.test(c)?Z(c):b(c));ba(c)&&(c=new Date(c));if(!ga(c)||!isFinite(c.getTime()))return c;
+for(;d;)(l=Ng.exec(d))?(h=ab(h,l,1),d=h.pop()):(h.push(d),d=null);var m=c.getTimezoneOffset();f&&(m=Pc(f,m),c=Yb(c,f,!0));q(h,function(b){k=Og[b];g+=k?k(c,a.DATETIME_FORMATS,m):"''"===b?"'":b.replace(/(^'|'$)/g,"").replace(/''/g,"'")});return g}}function Fg(){return function(a,b){w(b)&&(b=2);return cb(a,b)}}function Gg(){return function(a,b,d){b=Infinity===Math.abs(Number(b))?Number(b):Z(b);if(da(b))return a;ba(a)&&(a=a.toString());if(!qa(a))return a;d=!d||isNaN(d)?0:Z(d);d=0>d?Math.max(0,a.length+
+d):d;return 0<=b?Bc(a,d,d+b):0===d?Bc(a,b,a.length):Bc(a,Math.max(0,d+b),d)}}function Bc(a,b,d){return F(a)?a.slice(b,d):va.call(a,b,d)}function Rd(a){function b(b){return b.map(function(b){var c=1,d=Ya;if(D(b))d=b;else if(F(b)){if("+"===b.charAt(0)||"-"===b.charAt(0))c="-"===b.charAt(0)?-1:1,b=b.substring(1);if(""!==b&&(d=a(b),d.constant))var e=d(),d=function(a){return a[e]}}return{get:d,descending:c}})}function d(a){switch(typeof a){case "number":case "boolean":case "string":return!0;default:return!1}}
+function c(a,b){var c=0,d=a.type,k=b.type;if(d===k){var k=a.value,l=b.value;"string"===d?(k=k.toLowerCase(),l=l.toLowerCase()):"object"===d&&(C(k)&&(k=a.index),C(l)&&(l=b.index));k!==l&&(c=k<l?-1:1)}else c=d<k?-1:1;return c}return function(a,f,g,h){if(null==a)return a;if(!qa(a))throw L("orderBy")("notarray",a);H(f)||(f=[f]);0===f.length&&(f=["+"]);var k=b(f),l=g?-1:1,m=D(h)?h:c;a=Array.prototype.map.call(a,function(a,b){return{value:a,tieBreaker:{value:b,type:"number",index:b},predicateValues:k.map(function(c){var e=
+c.get(a);c=typeof e;if(null===e)c="string",e="null";else if("object"===c)a:{if(D(e.valueOf)&&(e=e.valueOf(),d(e)))break a;Wb(e)&&(e=e.toString(),d(e))}return{value:e,type:c,index:b}})}});a.sort(function(a,b){for(var c=0,d=k.length;c<d;c++){var e=m(a.predicateValues[c],b.predicateValues[c]);if(e)return e*k[c].descending*l}return m(a.tieBreaker,b.tieBreaker)*l});return a=a.map(function(a){return a.value})}}function Qa(a){D(a)&&(a={link:a});a.restrict=a.restrict||"AC";return la(a)}function Lb(a,b,d,
+c,e){this.$$controls=[];this.$error={};this.$$success={};this.$pending=void 0;this.$name=e(b.name||b.ngForm||"")(d);this.$dirty=!1;this.$valid=this.$pristine=!0;this.$submitted=this.$invalid=!1;this.$$parentForm=Mb;this.$$element=a;this.$$animate=c;Xd(this)}function Xd(a){a.$$classCache={};a.$$classCache[Yd]=!(a.$$classCache[nb]=a.$$element.hasClass(nb))}function Zd(a){function b(a,b,c){c&&!a.$$classCache[b]?(a.$$animate.addClass(a.$$element,b),a.$$classCache[b]=!0):!c&&a.$$classCache[b]&&(a.$$animate.removeClass(a.$$element,
+b),a.$$classCache[b]=!1)}function d(a,c,d){c=c?"-"+Tc(c,"-"):"";b(a,nb+c,!0===d);b(a,Yd+c,!1===d)}var c=a.set,e=a.unset;a.clazz.prototype.$setValidity=function(a,g,h){w(g)?(this.$pending||(this.$pending={}),c(this.$pending,a,h)):(this.$pending&&e(this.$pending,a,h),$d(this.$pending)&&(this.$pending=void 0));Ha(g)?g?(e(this.$error,a,h),c(this.$$success,a,h)):(c(this.$error,a,h),e(this.$$success,a,h)):(e(this.$error,a,h),e(this.$$success,a,h));this.$pending?(b(this,"ng-pending",!0),this.$valid=this.$invalid=
+void 0,d(this,"",null)):(b(this,"ng-pending",!1),this.$valid=$d(this.$error),this.$invalid=!this.$valid,d(this,"",this.$valid));g=this.$pending&&this.$pending[a]?void 0:this.$error[a]?!1:this.$$success[a]?!0:null;d(this,a,g);this.$$parentForm.$setValidity(a,g,this)}}function $d(a){if(a)for(var b in a)if(a.hasOwnProperty(b))return!1;return!0}function Cc(a){a.$formatters.push(function(b){return a.$isEmpty(b)?b:b.toString()})}function Ra(a,b,d,c,e,f){var g=Q(b[0].type);if(!e.android){var h=!1;b.on("compositionstart",
+function(){h=!0});b.on("compositionend",function(){h=!1;l()})}var k,l=function(a){k&&(f.defer.cancel(k),k=null);if(!h){var e=b.val();a=a&&a.type;"password"===g||d.ngTrim&&"false"===d.ngTrim||(e=T(e));(c.$viewValue!==e||""===e&&c.$$hasNativeValidators)&&c.$setViewValue(e,a)}};if(e.hasEvent("input"))b.on("input",l);else{var m=function(a,b,c){k||(k=f.defer(function(){k=null;b&&b.value===c||l(a)}))};b.on("keydown",function(a){var b=a.keyCode;91===b||15<b&&19>b||37<=b&&40>=b||m(a,this,this.value)});if(e.hasEvent("paste"))b.on("paste cut",
+m)}b.on("change",l);if(ae[g]&&c.$$hasNativeValidators&&g===d.type)b.on("keydown wheel mousedown",function(a){if(!k){var b=this.validity,c=b.badInput,d=b.typeMismatch;k=f.defer(function(){k=null;b.badInput===c&&b.typeMismatch===d||l(a)})}});c.$render=function(){var a=c.$isEmpty(c.$viewValue)?"":c.$viewValue;b.val()!==a&&b.val(a)}}function Nb(a,b){return function(d,c){var e,f;if(ga(d))return d;if(F(d)){'"'===d.charAt(0)&&'"'===d.charAt(d.length-1)&&(d=d.substring(1,d.length-1));if(Pg.test(d))return new Date(d);
+a.lastIndex=0;if(e=a.exec(d))return e.shift(),f=c?{yyyy:c.getFullYear(),MM:c.getMonth()+1,dd:c.getDate(),HH:c.getHours(),mm:c.getMinutes(),ss:c.getSeconds(),sss:c.getMilliseconds()/1E3}:{yyyy:1970,MM:1,dd:1,HH:0,mm:0,ss:0,sss:0},q(e,function(a,c){c<b.length&&(f[b[c]]=+a)}),new Date(f.yyyy,f.MM-1,f.dd,f.HH,f.mm,f.ss||0,1E3*f.sss||0)}return NaN}}function ob(a,b,d,c){return function(e,f,g,h,k,l,m){function n(a){return a&&!(a.getTime&&a.getTime()!==a.getTime())}function p(a){return u(a)&&!ga(a)?d(a)||
+void 0:a}Dc(e,f,g,h);Ra(e,f,g,h,k,l);var r=h&&h.$options.getOption("timezone"),q;h.$$parserName=a;h.$parsers.push(function(a){if(h.$isEmpty(a))return null;if(b.test(a))return a=d(a,q),r&&(a=Yb(a,r)),a});h.$formatters.push(function(a){if(a&&!ga(a))throw pb("datefmt",a);if(n(a))return(q=a)&&r&&(q=Yb(q,r,!0)),m("date")(a,c,r);q=null;return""});if(u(g.min)||g.ngMin){var v;h.$validators.min=function(a){return!n(a)||w(v)||d(a)>=v};g.$observe("min",function(a){v=p(a);h.$validate()})}if(u(g.max)||g.ngMax){var t;
+h.$validators.max=function(a){return!n(a)||w(t)||d(a)<=t};g.$observe("max",function(a){t=p(a);h.$validate()})}}}function Dc(a,b,d,c){(c.$$hasNativeValidators=C(b[0].validity))&&c.$parsers.push(function(a){var c=b.prop("validity")||{};return c.badInput||c.typeMismatch?void 0:a})}function be(a){a.$$parserName="number";a.$parsers.push(function(b){if(a.$isEmpty(b))return null;if(Qg.test(b))return parseFloat(b)});a.$formatters.push(function(b){if(!a.$isEmpty(b)){if(!ba(b))throw pb("numfmt",b);b=b.toString()}return b})}
+function Sa(a){u(a)&&!ba(a)&&(a=parseFloat(a));return da(a)?void 0:a}function Ec(a){var b=a.toString(),d=b.indexOf(".");return-1===d?-1<a&&1>a&&(a=/e-(\d+)$/.exec(b))?Number(a[1]):0:b.length-d-1}function ce(a,b,d){a=Number(a);var c=(a|0)!==a,e=(b|0)!==b,f=(d|0)!==d;if(c||e||f){var g=c?Ec(a):0,h=e?Ec(b):0,k=f?Ec(d):0,g=Math.max(g,h,k),g=Math.pow(10,g);a*=g;b*=g;d*=g;c&&(a=Math.round(a));e&&(b=Math.round(b));f&&(d=Math.round(d))}return 0===(a-b)%d}function de(a,b,d,c,e){if(u(c)){a=a(c);if(!a.constant)throw pb("constexpr",
+d,c);return a(b)}return e}function Fc(a,b){function d(a,b){if(!a||!a.length)return[];if(!b||!b.length)return a;var c=[],d=0;a:for(;d<a.length;d++){for(var e=a[d],m=0;m<b.length;m++)if(e===b[m])continue a;c.push(e)}return c}function c(a){var b=a;H(a)?b=a.map(c).join(" "):C(a)&&(b=Object.keys(a).filter(function(b){return a[b]}).join(" "));return b}a="ngClass"+a;var e;return["$parse",function(f){return{restrict:"AC",link:function(g,h,k){function l(a,b){var c=[];q(a,function(a){if(0<b||n[a])n[a]=(n[a]||
+0)+b,n[a]===+(0<b)&&c.push(a)});return c.join(" ")}function m(a){if(a===b){var c=r,c=l(c&&c.split(" "),1);k.$addClass(c)}else c=r,c=l(c&&c.split(" "),-1),k.$removeClass(c);p=a}var n=h.data("$classCounts"),p=!0,r;n||(n=V(),h.data("$classCounts",n));"ngClass"!==a&&(e||(e=f("$index",function(a){return a&1})),g.$watch(e,m));g.$watch(f(k[a],c),function(a){F(a)||(a=c(a));if(p===b){var e=a,f=r&&r.split(" "),g=e&&e.split(" "),e=d(f,g),f=d(g,f),e=l(e,-1),f=l(f,1);k.$addClass(f);k.$removeClass(e)}r=a})}}}]}
+function Ob(a,b,d,c,e,f,g,h,k){this.$modelValue=this.$viewValue=Number.NaN;this.$$rawModelValue=void 0;this.$validators={};this.$asyncValidators={};this.$parsers=[];this.$formatters=[];this.$viewChangeListeners=[];this.$untouched=!0;this.$touched=!1;this.$pristine=!0;this.$dirty=!1;this.$valid=!0;this.$invalid=!1;this.$error={};this.$$success={};this.$pending=void 0;this.$name=k(d.name||"",!1)(a);this.$$parentForm=Mb;this.$options=Pb;this.$$parsedNgModel=e(d.ngModel);this.$$parsedNgModelAssign=this.$$parsedNgModel.assign;
+this.$$ngModelGet=this.$$parsedNgModel;this.$$ngModelSet=this.$$parsedNgModelAssign;this.$$pendingDebounce=null;this.$$parserValid=void 0;this.$$currentValidationRunId=0;Object.defineProperty(this,"$$scope",{value:a});this.$$attr=d;this.$$element=c;this.$$animate=f;this.$$timeout=g;this.$$parse=e;this.$$q=h;this.$$exceptionHandler=b;Xd(this);Rg(this)}function Rg(a){a.$$scope.$watch(function(b){b=a.$$ngModelGet(b);if(b!==a.$modelValue&&(a.$modelValue===a.$modelValue||b===b)){a.$modelValue=a.$$rawModelValue=
+b;a.$$parserValid=void 0;for(var d=a.$formatters,c=d.length,e=b;c--;)e=d[c](e);a.$viewValue!==e&&(a.$$updateEmptyClasses(e),a.$viewValue=a.$$lastCommittedViewValue=e,a.$render(),a.$$runValidators(a.$modelValue,a.$viewValue,z))}return b})}function Gc(a){this.$$options=a}function ee(a,b){q(b,function(b,c){u(a[c])||(a[c]=b)})}function Ta(a,b){a.prop("selected",b);a.attr("selected",b)}var Sg=/^\/(.+)\/([a-z]*)$/,ua=Object.prototype.hasOwnProperty,Ic={objectMaxDepth:5},Q=function(a){return F(a)?a.toLowerCase():
+a},ub=function(a){return F(a)?a.toUpperCase():a},za,B,na,va=[].slice,sg=[].splice,Tg=[].push,ma=Object.prototype.toString,Mc=Object.getPrototypeOf,Fa=L("ng"),ea=x.angular||(x.angular={}),ac,qb=0;za=x.document.documentMode;var da=Number.isNaN||function(a){return a!==a};z.$inject=[];Ya.$inject=[];var H=Array.isArray,qe=/^\[object (?:Uint8|Uint8Clamped|Uint16|Uint32|Int8|Int16|Int32|Float32|Float64)Array]$/,T=function(a){return F(a)?a.trim():a},Kd=function(a){return a.replace(/([-()[\]{}+?*.$^|,:#<!\\])/g,
+"\\$1").replace(/\x08/g,"\\x08")},Ga=function(){if(!u(Ga.rules)){var a=x.document.querySelector("[ng-csp]")||x.document.querySelector("[data-ng-csp]");if(a){var b=a.getAttribute("ng-csp")||a.getAttribute("data-ng-csp");Ga.rules={noUnsafeEval:!b||-1!==b.indexOf("no-unsafe-eval"),noInlineStyle:!b||-1!==b.indexOf("no-inline-style")}}else{a=Ga;try{new Function(""),b=!1}catch(d){b=!0}a.rules={noUnsafeEval:b,noInlineStyle:!1}}}return Ga.rules},rb=function(){if(u(rb.name_))return rb.name_;var a,b,d=Ja.length,
+c,e;for(b=0;b<d;++b)if(c=Ja[b],a=x.document.querySelector("["+c.replace(":","\\:")+"jq]")){e=a.getAttribute(c+"jq");break}return rb.name_=e},se=/:/g,Ja=["ng-","data-ng-","ng:","x-ng-"],ve=function(a){var b=a.currentScript;if(!b)return!0;if(!(b instanceof x.HTMLScriptElement||b instanceof x.SVGScriptElement))return!1;b=b.attributes;return[b.getNamedItem("src"),b.getNamedItem("href"),b.getNamedItem("xlink:href")].every(function(b){if(!b)return!0;if(!b.value)return!1;var c=a.createElement("a");c.href=
+b.value;if(a.location.origin===c.origin)return!0;switch(c.protocol){case "http:":case "https:":case "ftp:":case "blob:":case "file:":case "data:":return!0;default:return!1}})}(x.document),ye=/[A-Z]/g,Uc=!1,Ia=3,De={full:"1.6.4",major:1,minor:6,dot:4,codeName:"phenomenal-footnote"};W.expando="ng339";var hb=W.cache={},eg=1;W._data=function(a){return this.cache[a[this.expando]]||{}};var ag=/-([a-z])/g,Ug=/^-ms-/,zb={mouseleave:"mouseout",mouseenter:"mouseover"},dc=L("jqLite"),dg=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,
+cc=/<|&#?\w+;/,bg=/<([\w:-]+)/,cg=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:-]+)[^>]*)\/>/gi,ha={option:[1,'<select multiple="multiple">',"</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ha.optgroup=ha.option;ha.tbody=ha.tfoot=ha.colgroup=ha.caption=ha.thead;ha.th=ha.td;var jg=x.Node.prototype.contains||function(a){return!!(this.compareDocumentPosition(a)&
+16)},Na=W.prototype={ready:ed,toString:function(){var a=[];q(this,function(b){a.push(""+b)});return"["+a.join(", ")+"]"},eq:function(a){return 0<=a?B(this[a]):B(this[this.length+a])},length:0,push:Tg,sort:[].sort,splice:[].splice},Fb={};q("multiple selected checked disabled readOnly required open".split(" "),function(a){Fb[Q(a)]=a});var jd={};q("input select option textarea button form details".split(" "),function(a){jd[a]=!0});var rd={ngMinlength:"minlength",ngMaxlength:"maxlength",ngMin:"min",ngMax:"max",
+ngPattern:"pattern",ngStep:"step"};q({data:hc,removeData:gc,hasData:function(a){for(var b in hb[a.ng339])return!0;return!1},cleanData:function(a){for(var b=0,d=a.length;b<d;b++)gc(a[b])}},function(a,b){W[b]=a});q({data:hc,inheritedData:Db,scope:function(a){return B.data(a,"$scope")||Db(a.parentNode||a,["$isolateScope","$scope"])},isolateScope:function(a){return B.data(a,"$isolateScope")||B.data(a,"$isolateScopeNoTemplate")},controller:gd,injector:function(a){return Db(a,"$injector")},removeAttr:function(a,
+b){a.removeAttribute(b)},hasClass:Ab,css:function(a,b,d){b=wb(b.replace(Ug,"ms-"));if(u(d))a.style[b]=d;else return a.style[b]},attr:function(a,b,d){var c=a.nodeType;if(c!==Ia&&2!==c&&8!==c&&a.getAttribute){var c=Q(b),e=Fb[c];if(u(d))null===d||!1===d&&e?a.removeAttribute(b):a.setAttribute(b,e?c:d);else return a=a.getAttribute(b),e&&null!==a&&(a=c),null===a?void 0:a}},prop:function(a,b,d){if(u(d))a[b]=d;else return a[b]},text:function(){function a(a,d){if(w(d)){var c=a.nodeType;return 1===c||c===Ia?
+a.textContent:""}a.textContent=d}a.$dv="";return a}(),val:function(a,b){if(w(b)){if(a.multiple&&"select"===wa(a)){var d=[];q(a.options,function(a){a.selected&&d.push(a.value||a.text)});return d}return a.value}a.value=b},html:function(a,b){if(w(b))return a.innerHTML;xb(a,!0);a.innerHTML=b},empty:hd},function(a,b){W.prototype[b]=function(b,c){var e,f,g=this.length;if(a!==hd&&w(2===a.length&&a!==Ab&&a!==gd?b:c)){if(C(b)){for(e=0;e<g;e++)if(a===hc)a(this[e],b);else for(f in b)a(this[e],f,b[f]);return this}e=
+a.$dv;g=w(e)?Math.min(g,1):g;for(f=0;f<g;f++){var h=a(this[f],b,c);e=e?e+h:h}return e}for(e=0;e<g;e++)a(this[e],b,c);return this}});q({removeData:gc,on:function(a,b,d,c){if(u(c))throw dc("onargs");if(bc(a)){c=yb(a,!0);var e=c.events,f=c.handle;f||(f=c.handle=gg(a,e));c=0<=b.indexOf(" ")?b.split(" "):[b];for(var g=c.length,h=function(b,c,g){var h=e[b];h||(h=e[b]=[],h.specialHandlerWrapper=c,"$destroy"===b||g||a.addEventListener(b,f));h.push(d)};g--;)b=c[g],zb[b]?(h(zb[b],ig),h(b,void 0,!0)):h(b)}},
+off:fd,one:function(a,b,d){a=B(a);a.on(b,function e(){a.off(b,d);a.off(b,e)});a.on(b,d)},replaceWith:function(a,b){var d,c=a.parentNode;xb(a);q(new W(b),function(b){d?c.insertBefore(b,d.nextSibling):c.replaceChild(b,a);d=b})},children:function(a){var b=[];q(a.childNodes,function(a){1===a.nodeType&&b.push(a)});return b},contents:function(a){return a.contentDocument||a.childNodes||[]},append:function(a,b){var d=a.nodeType;if(1===d||11===d){b=new W(b);for(var d=0,c=b.length;d<c;d++)a.appendChild(b[d])}},
+prepend:function(a,b){if(1===a.nodeType){var d=a.firstChild;q(new W(b),function(b){a.insertBefore(b,d)})}},wrap:function(a,b){var d=B(b).eq(0).clone()[0],c=a.parentNode;c&&c.replaceChild(d,a);d.appendChild(a)},remove:Eb,detach:function(a){Eb(a,!0)},after:function(a,b){var d=a,c=a.parentNode;if(c){b=new W(b);for(var e=0,f=b.length;e<f;e++){var g=b[e];c.insertBefore(g,d.nextSibling);d=g}}},addClass:Cb,removeClass:Bb,toggleClass:function(a,b,d){b&&q(b.split(" "),function(b){var e=d;w(e)&&(e=!Ab(a,b));
+(e?Cb:Bb)(a,b)})},parent:function(a){return(a=a.parentNode)&&11!==a.nodeType?a:null},next:function(a){return a.nextElementSibling},find:function(a,b){return a.getElementsByTagName?a.getElementsByTagName(b):[]},clone:fc,triggerHandler:function(a,b,d){var c,e,f=b.type||b,g=yb(a);if(g=(g=g&&g.events)&&g[f])c={preventDefault:function(){this.defaultPrevented=!0},isDefaultPrevented:function(){return!0===this.defaultPrevented},stopImmediatePropagation:function(){this.immediatePropagationStopped=!0},isImmediatePropagationStopped:function(){return!0===
+this.immediatePropagationStopped},stopPropagation:z,type:f,target:a},b.type&&(c=S(c,b)),b=pa(g),e=d?[c].concat(d):[c],q(b,function(b){c.isImmediatePropagationStopped()||b.apply(a,e)})}},function(a,b){W.prototype[b]=function(b,c,e){for(var f,g=0,h=this.length;g<h;g++)w(f)?(f=a(this[g],b,c,e),u(f)&&(f=B(f))):ec(f,a(this[g],b,c,e));return u(f)?f:this}});W.prototype.bind=W.prototype.on;W.prototype.unbind=W.prototype.off;var Vg=Object.create(null);kd.prototype={_idx:function(a){if(a===this._lastKey)return this._lastIndex;
+this._lastKey=a;return this._lastIndex=this._keys.indexOf(a)},_transformKey:function(a){return da(a)?Vg:a},get:function(a){a=this._transformKey(a);a=this._idx(a);if(-1!==a)return this._values[a]},set:function(a,b){a=this._transformKey(a);var d=this._idx(a);-1===d&&(d=this._lastIndex=this._keys.length);this._keys[d]=a;this._values[d]=b},delete:function(a){a=this._transformKey(a);a=this._idx(a);if(-1===a)return!1;this._keys.splice(a,1);this._values.splice(a,1);this._lastKey=NaN;this._lastIndex=-1;return!0}};
+var Gb=kd,Zf=[function(){this.$get=[function(){return Gb}]}],lg=/^([^(]+?)=>/,mg=/^[^(]*\(\s*([^)]*)\)/m,Wg=/,/,Xg=/^\s*(_?)(\S+?)\1\s*$/,kg=/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg,ya=L("$injector");eb.$$annotate=function(a,b,d){var c;if("function"===typeof a){if(!(c=a.$inject)){c=[];if(a.length){if(b)throw F(d)&&d||(d=a.name||ng(a)),ya("strictdi",d);b=ld(a);q(b[1].split(Wg),function(a){a.replace(Xg,function(a,b,d){c.push(d)})})}a.$inject=c}}else H(a)?(b=a.length-1,sb(a[b],"fn"),c=a.slice(0,b)):sb(a,"fn",
+!0);return c};var fe=L("$animate"),qf=function(){this.$get=z},rf=function(){var a=new Gb,b=[];this.$get=["$$AnimateRunner","$rootScope",function(d,c){function e(a,b,c){var d=!1;b&&(b=F(b)?b.split(" "):H(b)?b:[],q(b,function(b){b&&(d=!0,a[b]=c)}));return d}function f(){q(b,function(b){var c=a.get(b);if(c){var d=og(b.attr("class")),e="",f="";q(c,function(a,b){a!==!!d[b]&&(a?e+=(e.length?" ":"")+b:f+=(f.length?" ":"")+b)});q(b,function(a){e&&Cb(a,e);f&&Bb(a,f)});a.delete(b)}});b.length=0}return{enabled:z,
+on:z,off:z,pin:z,push:function(g,h,k,l){l&&l();k=k||{};k.from&&g.css(k.from);k.to&&g.css(k.to);if(k.addClass||k.removeClass)if(h=k.addClass,l=k.removeClass,k=a.get(g)||{},h=e(k,h,!0),l=e(k,l,!1),h||l)a.set(g,k),b.push(g),1===b.length&&c.$$postDigest(f);g=new d;g.complete();return g}}}]},of=["$provide",function(a){var b=this,d=null;this.$$registeredAnimations=Object.create(null);this.register=function(c,d){if(c&&"."!==c.charAt(0))throw fe("notcsel",c);var f=c+"-animation";b.$$registeredAnimations[c.substr(1)]=
+f;a.factory(f,d)};this.classNameFilter=function(a){if(1===arguments.length&&(d=a instanceof RegExp?a:null)&&/[(\s|\/)]ng-animate[(\s|\/)]/.test(d.toString()))throw d=null,fe("nongcls","ng-animate");return d};this.$get=["$$animateQueue",function(a){function b(a,c,d){if(d){var e;a:{for(e=0;e<d.length;e++){var l=d[e];if(1===l.nodeType){e=l;break a}}e=void 0}!e||e.parentNode||e.previousElementSibling||(d=null)}d?d.after(a):c.prepend(a)}return{on:a.on,off:a.off,pin:a.pin,enabled:a.enabled,cancel:function(a){a.end&&
+a.end()},enter:function(d,g,h,k){g=g&&B(g);h=h&&B(h);g=g||h.parent();b(d,g,h);return a.push(d,"enter",ia(k))},move:function(d,g,h,k){g=g&&B(g);h=h&&B(h);g=g||h.parent();b(d,g,h);return a.push(d,"move",ia(k))},leave:function(b,d){return a.push(b,"leave",ia(d),function(){b.remove()})},addClass:function(b,d,e){e=ia(e);e.addClass=ib(e.addclass,d);return a.push(b,"addClass",e)},removeClass:function(b,d,e){e=ia(e);e.removeClass=ib(e.removeClass,d);return a.push(b,"removeClass",e)},setClass:function(b,d,
+e,k){k=ia(k);k.addClass=ib(k.addClass,d);k.removeClass=ib(k.removeClass,e);return a.push(b,"setClass",k)},animate:function(b,d,e,k,l){l=ia(l);l.from=l.from?S(l.from,d):d;l.to=l.to?S(l.to,e):e;l.tempClasses=ib(l.tempClasses,k||"ng-inline-animate");return a.push(b,"animate",l)}}}]}],tf=function(){this.$get=["$$rAF",function(a){function b(b){d.push(b);1<d.length||a(function(){for(var a=0;a<d.length;a++)d[a]();d=[]})}var d=[];return function(){var a=!1;b(function(){a=!0});return function(d){a?d():b(d)}}}]},
+sf=function(){this.$get=["$q","$sniffer","$$animateAsyncRun","$$isDocumentHidden","$timeout",function(a,b,d,c,e){function f(a){this.setHost(a);var b=d();this._doneCallbacks=[];this._tick=function(a){c()?e(a,0,!1):b(a)};this._state=0}f.chain=function(a,b){function c(){if(d===a.length)b(!0);else a[d](function(a){!1===a?b(!1):(d++,c())})}var d=0;c()};f.all=function(a,b){function c(f){e=e&&f;++d===a.length&&b(e)}var d=0,e=!0;q(a,function(a){a.done(c)})};f.prototype={setHost:function(a){this.host=a||{}},
+done:function(a){2===this._state?a():this._doneCallbacks.push(a)},progress:z,getPromise:function(){if(!this.promise){var b=this;this.promise=a(function(a,c){b.done(function(b){!1===b?c():a()})})}return this.promise},then:function(a,b){return this.getPromise().then(a,b)},"catch":function(a){return this.getPromise()["catch"](a)},"finally":function(a){return this.getPromise()["finally"](a)},pause:function(){this.host.pause&&this.host.pause()},resume:function(){this.host.resume&&this.host.resume()},end:function(){this.host.end&&
+this.host.end();this._resolve(!0)},cancel:function(){this.host.cancel&&this.host.cancel();this._resolve(!1)},complete:function(a){var b=this;0===b._state&&(b._state=1,b._tick(function(){b._resolve(a)}))},_resolve:function(a){2!==this._state&&(q(this._doneCallbacks,function(b){b(a)}),this._doneCallbacks.length=0,this._state=2)}};return f}]},pf=function(){this.$get=["$$rAF","$q","$$AnimateRunner",function(a,b,d){return function(b,e){function f(){a(function(){g.addClass&&(b.addClass(g.addClass),g.addClass=
+null);g.removeClass&&(b.removeClass(g.removeClass),g.removeClass=null);g.to&&(b.css(g.to),g.to=null);h||k.complete();h=!0});return k}var g=e||{};g.$$prepared||(g=ra(g));g.cleanupStyles&&(g.from=g.to=null);g.from&&(b.css(g.from),g.from=null);var h,k=new d;return{start:f,end:f}}}]},fa=L("$compile"),lc=new function(){};Wc.$inject=["$provide","$$sanitizeUriProvider"];Ib.prototype.isFirstChange=function(){return this.previousValue===lc};var md=/^((?:x|data)[:\-_])/i,rg=/[:\-_]+(.)/g,td=L("$controller"),
+sd=/^(\S+)(\s+as\s+([\w$]+))?$/,Af=function(){this.$get=["$document",function(a){return function(b){b?!b.nodeType&&b instanceof B&&(b=b[0]):b=a[0].body;return b.offsetWidth+1}}]},ud="application/json",pc={"Content-Type":ud+";charset=utf-8"},ug=/^\[|^\{(?!\{)/,vg={"[":/]$/,"{":/}$/},tg=/^\)]\}',?\n/,oc=L("$http"),Da=ea.$interpolateMinErr=L("$interpolate");Da.throwNoconcat=function(a){throw Da("noconcat",a);};Da.interr=function(a,b){return Da("interr",a,b.toString())};var If=function(){this.$get=function(){function a(a){var b=
+function(a){b.data=a;b.called=!0};b.id=a;return b}var b=ea.callbacks,d={};return{createCallback:function(c){c="_"+(b.$$counter++).toString(36);var e="angular.callbacks."+c,f=a(c);d[e]=b[c]=f;return e},wasCalled:function(a){return d[a].called},getResponse:function(a){return d[a].data},removeCallback:function(a){delete b[d[a].id];delete d[a]}}}},Yg=/^([^?#]*)(\?([^#]*))?(#(.*))?$/,xg={http:80,https:443,ftp:21},kb=L("$location"),yg=/^\s*[\\/]{2,}/,Zg={$$absUrl:"",$$html5:!1,$$replace:!1,absUrl:Jb("$$absUrl"),
+url:function(a){if(w(a))return this.$$url;var b=Yg.exec(a);(b[1]||""===a)&&this.path(decodeURIComponent(b[1]));(b[2]||b[1]||""===a)&&this.search(b[3]||"");this.hash(b[5]||"");return this},protocol:Jb("$$protocol"),host:Jb("$$host"),port:Jb("$$port"),path:Cd("$$path",function(a){a=null!==a?a.toString():"";return"/"===a.charAt(0)?a:"/"+a}),search:function(a,b){switch(arguments.length){case 0:return this.$$search;case 1:if(F(a)||ba(a))a=a.toString(),this.$$search=Rc(a);else if(C(a))a=ra(a,{}),q(a,function(b,
+c){null==b&&delete a[c]}),this.$$search=a;else throw kb("isrcharg");break;default:w(b)||null===b?delete this.$$search[a]:this.$$search[a]=b}this.$$compose();return this},hash:Cd("$$hash",function(a){return null!==a?a.toString():""}),replace:function(){this.$$replace=!0;return this}};q([Bd,tc,sc],function(a){a.prototype=Object.create(Zg);a.prototype.state=function(b){if(!arguments.length)return this.$$state;if(a!==sc||!this.$$html5)throw kb("nostate");this.$$state=w(b)?null:b;this.$$urlUpdatedByLocation=
+!0;return this}});var Ua=L("$parse"),Bg={}.constructor.prototype.valueOf,Qb=V();q("+ - * / % === !== == != < > <= >= && || ! = |".split(" "),function(a){Qb[a]=!0});var $g={n:"\n",f:"\f",r:"\r",t:"\t",v:"\v","'":"'",'"':'"'},wc=function(a){this.options=a};wc.prototype={constructor:wc,lex:function(a){this.text=a;this.index=0;for(this.tokens=[];this.index<this.text.length;)if(a=this.text.charAt(this.index),'"'===a||"'"===a)this.readString(a);else if(this.isNumber(a)||"."===a&&this.isNumber(this.peek()))this.readNumber();
+else if(this.isIdentifierStart(this.peekMultichar()))this.readIdent();else if(this.is(a,"(){}[].,;:?"))this.tokens.push({index:this.index,text:a}),this.index++;else if(this.isWhitespace(a))this.index++;else{var b=a+this.peek(),d=b+this.peek(2),c=Qb[b],e=Qb[d];Qb[a]||c||e?(a=e?d:c?b:a,this.tokens.push({index:this.index,text:a,operator:!0}),this.index+=a.length):this.throwError("Unexpected next character ",this.index,this.index+1)}return this.tokens},is:function(a,b){return-1!==b.indexOf(a)},peek:function(a){a=
+a||1;return this.index+a<this.text.length?this.text.charAt(this.index+a):!1},isNumber:function(a){return"0"<=a&&"9">=a&&"string"===typeof a},isWhitespace:function(a){return" "===a||"\r"===a||"\t"===a||"\n"===a||"\v"===a||"\u00a0"===a},isIdentifierStart:function(a){return this.options.isIdentifierStart?this.options.isIdentifierStart(a,this.codePointAt(a)):this.isValidIdentifierStart(a)},isValidIdentifierStart:function(a){return"a"<=a&&"z">=a||"A"<=a&&"Z">=a||"_"===a||"$"===a},isIdentifierContinue:function(a){return this.options.isIdentifierContinue?
+this.options.isIdentifierContinue(a,this.codePointAt(a)):this.isValidIdentifierContinue(a)},isValidIdentifierContinue:function(a,b){return this.isValidIdentifierStart(a,b)||this.isNumber(a)},codePointAt:function(a){return 1===a.length?a.charCodeAt(0):(a.charCodeAt(0)<<10)+a.charCodeAt(1)-56613888},peekMultichar:function(){var a=this.text.charAt(this.index),b=this.peek();if(!b)return a;var d=a.charCodeAt(0),c=b.charCodeAt(0);return 55296<=d&&56319>=d&&56320<=c&&57343>=c?a+b:a},isExpOperator:function(a){return"-"===
+a||"+"===a||this.isNumber(a)},throwError:function(a,b,d){d=d||this.index;b=u(b)?"s "+b+"-"+this.index+" ["+this.text.substring(b,d)+"]":" "+d;throw Ua("lexerr",a,b,this.text);},readNumber:function(){for(var a="",b=this.index;this.index<this.text.length;){var d=Q(this.text.charAt(this.index));if("."===d||this.isNumber(d))a+=d;else{var c=this.peek();if("e"===d&&this.isExpOperator(c))a+=d;else if(this.isExpOperator(d)&&c&&this.isNumber(c)&&"e"===a.charAt(a.length-1))a+=d;else if(!this.isExpOperator(d)||
+c&&this.isNumber(c)||"e"!==a.charAt(a.length-1))break;else this.throwError("Invalid exponent")}this.index++}this.tokens.push({index:b,text:a,constant:!0,value:Number(a)})},readIdent:function(){var a=this.index;for(this.index+=this.peekMultichar().length;this.index<this.text.length;){var b=this.peekMultichar();if(!this.isIdentifierContinue(b))break;this.index+=b.length}this.tokens.push({index:a,text:this.text.slice(a,this.index),identifier:!0})},readString:function(a){var b=this.index;this.index++;
+for(var d="",c=a,e=!1;this.index<this.text.length;){var f=this.text.charAt(this.index),c=c+f;if(e)"u"===f?(e=this.text.substring(this.index+1,this.index+5),e.match(/[\da-f]{4}/i)||this.throwError("Invalid unicode escape [\\u"+e+"]"),this.index+=4,d+=String.fromCharCode(parseInt(e,16))):d+=$g[f]||f,e=!1;else if("\\"===f)e=!0;else{if(f===a){this.index++;this.tokens.push({index:b,text:c,constant:!0,value:d});return}d+=f}this.index++}this.throwError("Unterminated quote",b)}};var s=function(a,b){this.lexer=
+a;this.options=b};s.Program="Program";s.ExpressionStatement="ExpressionStatement";s.AssignmentExpression="AssignmentExpression";s.ConditionalExpression="ConditionalExpression";s.LogicalExpression="LogicalExpression";s.BinaryExpression="BinaryExpression";s.UnaryExpression="UnaryExpression";s.CallExpression="CallExpression";s.MemberExpression="MemberExpression";s.Identifier="Identifier";s.Literal="Literal";s.ArrayExpression="ArrayExpression";s.Property="Property";s.ObjectExpression="ObjectExpression";
+s.ThisExpression="ThisExpression";s.LocalsExpression="LocalsExpression";s.NGValueParameter="NGValueParameter";s.prototype={ast:function(a){this.text=a;this.tokens=this.lexer.lex(a);a=this.program();0!==this.tokens.length&&this.throwError("is an unexpected token",this.tokens[0]);return a},program:function(){for(var a=[];;)if(0<this.tokens.length&&!this.peek("}",")",";","]")&&a.push(this.expressionStatement()),!this.expect(";"))return{type:s.Program,body:a}},expressionStatement:function(){return{type:s.ExpressionStatement,
+expression:this.filterChain()}},filterChain:function(){for(var a=this.expression();this.expect("|");)a=this.filter(a);return a},expression:function(){return this.assignment()},assignment:function(){var a=this.ternary();if(this.expect("=")){if(!Fd(a))throw Ua("lval");a={type:s.AssignmentExpression,left:a,right:this.assignment(),operator:"="}}return a},ternary:function(){var a=this.logicalOR(),b,d;return this.expect("?")&&(b=this.expression(),this.consume(":"))?(d=this.expression(),{type:s.ConditionalExpression,
+test:a,alternate:b,consequent:d}):a},logicalOR:function(){for(var a=this.logicalAND();this.expect("||");)a={type:s.LogicalExpression,operator:"||",left:a,right:this.logicalAND()};return a},logicalAND:function(){for(var a=this.equality();this.expect("&&");)a={type:s.LogicalExpression,operator:"&&",left:a,right:this.equality()};return a},equality:function(){for(var a=this.relational(),b;b=this.expect("==","!=","===","!==");)a={type:s.BinaryExpression,operator:b.text,left:a,right:this.relational()};
+return a},relational:function(){for(var a=this.additive(),b;b=this.expect("<",">","<=",">=");)a={type:s.BinaryExpression,operator:b.text,left:a,right:this.additive()};return a},additive:function(){for(var a=this.multiplicative(),b;b=this.expect("+","-");)a={type:s.BinaryExpression,operator:b.text,left:a,right:this.multiplicative()};return a},multiplicative:function(){for(var a=this.unary(),b;b=this.expect("*","/","%");)a={type:s.BinaryExpression,operator:b.text,left:a,right:this.unary()};return a},
+unary:function(){var a;return(a=this.expect("+","-","!"))?{type:s.UnaryExpression,operator:a.text,prefix:!0,argument:this.unary()}:this.primary()},primary:function(){var a;this.expect("(")?(a=this.filterChain(),this.consume(")")):this.expect("[")?a=this.arrayDeclaration():this.expect("{")?a=this.object():this.selfReferential.hasOwnProperty(this.peek().text)?a=ra(this.selfReferential[this.consume().text]):this.options.literals.hasOwnProperty(this.peek().text)?a={type:s.Literal,value:this.options.literals[this.consume().text]}:
+this.peek().identifier?a=this.identifier():this.peek().constant?a=this.constant():this.throwError("not a primary expression",this.peek());for(var b;b=this.expect("(","[",".");)"("===b.text?(a={type:s.CallExpression,callee:a,arguments:this.parseArguments()},this.consume(")")):"["===b.text?(a={type:s.MemberExpression,object:a,property:this.expression(),computed:!0},this.consume("]")):"."===b.text?a={type:s.MemberExpression,object:a,property:this.identifier(),computed:!1}:this.throwError("IMPOSSIBLE");
+return a},filter:function(a){a=[a];for(var b={type:s.CallExpression,callee:this.identifier(),arguments:a,filter:!0};this.expect(":");)a.push(this.expression());return b},parseArguments:function(){var a=[];if(")"!==this.peekToken().text){do a.push(this.filterChain());while(this.expect(","))}return a},identifier:function(){var a=this.consume();a.identifier||this.throwError("is not a valid identifier",a);return{type:s.Identifier,name:a.text}},constant:function(){return{type:s.Literal,value:this.consume().value}},
+arrayDeclaration:function(){var a=[];if("]"!==this.peekToken().text){do{if(this.peek("]"))break;a.push(this.expression())}while(this.expect(","))}this.consume("]");return{type:s.ArrayExpression,elements:a}},object:function(){var a=[],b;if("}"!==this.peekToken().text){do{if(this.peek("}"))break;b={type:s.Property,kind:"init"};this.peek().constant?(b.key=this.constant(),b.computed=!1,this.consume(":"),b.value=this.expression()):this.peek().identifier?(b.key=this.identifier(),b.computed=!1,this.peek(":")?
+(this.consume(":"),b.value=this.expression()):b.value=b.key):this.peek("[")?(this.consume("["),b.key=this.expression(),this.consume("]"),b.computed=!0,this.consume(":"),b.value=this.expression()):this.throwError("invalid key",this.peek());a.push(b)}while(this.expect(","))}this.consume("}");return{type:s.ObjectExpression,properties:a}},throwError:function(a,b){throw Ua("syntax",b.text,a,b.index+1,this.text,this.text.substring(b.index));},consume:function(a){if(0===this.tokens.length)throw Ua("ueoe",
+this.text);var b=this.expect(a);b||this.throwError("is unexpected, expecting ["+a+"]",this.peek());return b},peekToken:function(){if(0===this.tokens.length)throw Ua("ueoe",this.text);return this.tokens[0]},peek:function(a,b,d,c){return this.peekAhead(0,a,b,d,c)},peekAhead:function(a,b,d,c,e){if(this.tokens.length>a){a=this.tokens[a];var f=a.text;if(f===b||f===d||f===c||f===e||!(b||d||c||e))return a}return!1},expect:function(a,b,d,c){return(a=this.peek(a,b,d,c))?(this.tokens.shift(),a):!1},selfReferential:{"this":{type:s.ThisExpression},
+$locals:{type:s.LocalsExpression}}};Hd.prototype={compile:function(a){var b=this;this.state={nextId:0,filters:{},fn:{vars:[],body:[],own:{}},assign:{vars:[],body:[],own:{}},inputs:[]};U(a,b.$filter);var d="",c;this.stage="assign";if(c=Gd(a))this.state.computing="assign",d=this.nextId(),this.recurse(c,d),this.return_(d),d="fn.assign="+this.generateFunction("assign","s,v,l");c=Ed(a.body);b.stage="inputs";q(c,function(a,c){var d="fn"+c;b.state[d]={vars:[],body:[],own:{}};b.state.computing=d;var h=b.nextId();
+b.recurse(a,h);b.return_(h);b.state.inputs.push(d);a.watchId=c});this.state.computing="fn";this.stage="main";this.recurse(a);a='"'+this.USE+" "+this.STRICT+'";\n'+this.filterPrefix()+"var fn="+this.generateFunction("fn","s,l,a,i")+d+this.watchFns()+"return fn;";a=(new Function("$filter","getStringValue","ifDefined","plus",a))(this.$filter,zg,Ag,Dd);this.state=this.stage=void 0;return a},USE:"use",STRICT:"strict",watchFns:function(){var a=[],b=this.state.inputs,d=this;q(b,function(b){a.push("var "+
+b+"="+d.generateFunction(b,"s"))});b.length&&a.push("fn.inputs=["+b.join(",")+"];");return a.join("")},generateFunction:function(a,b){return"function("+b+"){"+this.varsPrefix(a)+this.body(a)+"};"},filterPrefix:function(){var a=[],b=this;q(this.state.filters,function(d,c){a.push(d+"=$filter("+b.escape(c)+")")});return a.length?"var "+a.join(",")+";":""},varsPrefix:function(a){return this.state[a].vars.length?"var "+this.state[a].vars.join(",")+";":""},body:function(a){return this.state[a].body.join("")},
+recurse:function(a,b,d,c,e,f){var g,h,k=this,l,m,n;c=c||z;if(!f&&u(a.watchId))b=b||this.nextId(),this.if_("i",this.lazyAssign(b,this.computedMember("i",a.watchId)),this.lazyRecurse(a,b,d,c,e,!0));else switch(a.type){case s.Program:q(a.body,function(b,c){k.recurse(b.expression,void 0,void 0,function(a){h=a});c!==a.body.length-1?k.current().body.push(h,";"):k.return_(h)});break;case s.Literal:m=this.escape(a.value);this.assign(b,m);c(b||m);break;case s.UnaryExpression:this.recurse(a.argument,void 0,
+void 0,function(a){h=a});m=a.operator+"("+this.ifDefined(h,0)+")";this.assign(b,m);c(m);break;case s.BinaryExpression:this.recurse(a.left,void 0,void 0,function(a){g=a});this.recurse(a.right,void 0,void 0,function(a){h=a});m="+"===a.operator?this.plus(g,h):"-"===a.operator?this.ifDefined(g,0)+a.operator+this.ifDefined(h,0):"("+g+")"+a.operator+"("+h+")";this.assign(b,m);c(m);break;case s.LogicalExpression:b=b||this.nextId();k.recurse(a.left,b);k.if_("&&"===a.operator?b:k.not(b),k.lazyRecurse(a.right,
+b));c(b);break;case s.ConditionalExpression:b=b||this.nextId();k.recurse(a.test,b);k.if_(b,k.lazyRecurse(a.alternate,b),k.lazyRecurse(a.consequent,b));c(b);break;case s.Identifier:b=b||this.nextId();d&&(d.context="inputs"===k.stage?"s":this.assign(this.nextId(),this.getHasOwnProperty("l",a.name)+"?l:s"),d.computed=!1,d.name=a.name);k.if_("inputs"===k.stage||k.not(k.getHasOwnProperty("l",a.name)),function(){k.if_("inputs"===k.stage||"s",function(){e&&1!==e&&k.if_(k.isNull(k.nonComputedMember("s",a.name)),
+k.lazyAssign(k.nonComputedMember("s",a.name),"{}"));k.assign(b,k.nonComputedMember("s",a.name))})},b&&k.lazyAssign(b,k.nonComputedMember("l",a.name)));c(b);break;case s.MemberExpression:g=d&&(d.context=this.nextId())||this.nextId();b=b||this.nextId();k.recurse(a.object,g,void 0,function(){k.if_(k.notNull(g),function(){a.computed?(h=k.nextId(),k.recurse(a.property,h),k.getStringValue(h),e&&1!==e&&k.if_(k.not(k.computedMember(g,h)),k.lazyAssign(k.computedMember(g,h),"{}")),m=k.computedMember(g,h),k.assign(b,
+m),d&&(d.computed=!0,d.name=h)):(e&&1!==e&&k.if_(k.isNull(k.nonComputedMember(g,a.property.name)),k.lazyAssign(k.nonComputedMember(g,a.property.name),"{}")),m=k.nonComputedMember(g,a.property.name),k.assign(b,m),d&&(d.computed=!1,d.name=a.property.name))},function(){k.assign(b,"undefined")});c(b)},!!e);break;case s.CallExpression:b=b||this.nextId();a.filter?(h=k.filter(a.callee.name),l=[],q(a.arguments,function(a){var b=k.nextId();k.recurse(a,b);l.push(b)}),m=h+"("+l.join(",")+")",k.assign(b,m),c(b)):
+(h=k.nextId(),g={},l=[],k.recurse(a.callee,h,g,function(){k.if_(k.notNull(h),function(){q(a.arguments,function(b){k.recurse(b,a.constant?void 0:k.nextId(),void 0,function(a){l.push(a)})});m=g.name?k.member(g.context,g.name,g.computed)+"("+l.join(",")+")":h+"("+l.join(",")+")";k.assign(b,m)},function(){k.assign(b,"undefined")});c(b)}));break;case s.AssignmentExpression:h=this.nextId();g={};this.recurse(a.left,void 0,g,function(){k.if_(k.notNull(g.context),function(){k.recurse(a.right,h);m=k.member(g.context,
+g.name,g.computed)+a.operator+h;k.assign(b,m);c(b||m)})},1);break;case s.ArrayExpression:l=[];q(a.elements,function(b){k.recurse(b,a.constant?void 0:k.nextId(),void 0,function(a){l.push(a)})});m="["+l.join(",")+"]";this.assign(b,m);c(b||m);break;case s.ObjectExpression:l=[];n=!1;q(a.properties,function(a){a.computed&&(n=!0)});n?(b=b||this.nextId(),this.assign(b,"{}"),q(a.properties,function(a){a.computed?(g=k.nextId(),k.recurse(a.key,g)):g=a.key.type===s.Identifier?a.key.name:""+a.key.value;h=k.nextId();
+k.recurse(a.value,h);k.assign(k.member(b,g,a.computed),h)})):(q(a.properties,function(b){k.recurse(b.value,a.constant?void 0:k.nextId(),void 0,function(a){l.push(k.escape(b.key.type===s.Identifier?b.key.name:""+b.key.value)+":"+a)})}),m="{"+l.join(",")+"}",this.assign(b,m));c(b||m);break;case s.ThisExpression:this.assign(b,"s");c(b||"s");break;case s.LocalsExpression:this.assign(b,"l");c(b||"l");break;case s.NGValueParameter:this.assign(b,"v"),c(b||"v")}},getHasOwnProperty:function(a,b){var d=a+"."+
+b,c=this.current().own;c.hasOwnProperty(d)||(c[d]=this.nextId(!1,a+"&&("+this.escape(b)+" in "+a+")"));return c[d]},assign:function(a,b){if(a)return this.current().body.push(a,"=",b,";"),a},filter:function(a){this.state.filters.hasOwnProperty(a)||(this.state.filters[a]=this.nextId(!0));return this.state.filters[a]},ifDefined:function(a,b){return"ifDefined("+a+","+this.escape(b)+")"},plus:function(a,b){return"plus("+a+","+b+")"},return_:function(a){this.current().body.push("return ",a,";")},if_:function(a,
+b,d){if(!0===a)b();else{var c=this.current().body;c.push("if(",a,"){");b();c.push("}");d&&(c.push("else{"),d(),c.push("}"))}},not:function(a){return"!("+a+")"},isNull:function(a){return a+"==null"},notNull:function(a){return a+"!=null"},nonComputedMember:function(a,b){var d=/[^$_a-zA-Z0-9]/g;return/^[$_a-zA-Z][$_a-zA-Z0-9]*$/.test(b)?a+"."+b:a+'["'+b.replace(d,this.stringEscapeFn)+'"]'},computedMember:function(a,b){return a+"["+b+"]"},member:function(a,b,d){return d?this.computedMember(a,b):this.nonComputedMember(a,
+b)},getStringValue:function(a){this.assign(a,"getStringValue("+a+")")},lazyRecurse:function(a,b,d,c,e,f){var g=this;return function(){g.recurse(a,b,d,c,e,f)}},lazyAssign:function(a,b){var d=this;return function(){d.assign(a,b)}},stringEscapeRegex:/[^ a-zA-Z0-9]/g,stringEscapeFn:function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)},escape:function(a){if(F(a))return"'"+a.replace(this.stringEscapeRegex,this.stringEscapeFn)+"'";if(ba(a))return a.toString();if(!0===a)return"true";if(!1===
+a)return"false";if(null===a)return"null";if("undefined"===typeof a)return"undefined";throw Ua("esc");},nextId:function(a,b){var d="v"+this.state.nextId++;a||this.current().vars.push(d+(b?"="+b:""));return d},current:function(){return this.state[this.state.computing]}};Id.prototype={compile:function(a){var b=this;U(a,b.$filter);var d,c;if(d=Gd(a))c=this.recurse(d);d=Ed(a.body);var e;d&&(e=[],q(d,function(a,c){var d=b.recurse(a);a.input=d;e.push(d);a.watchId=c}));var f=[];q(a.body,function(a){f.push(b.recurse(a.expression))});
+a=0===a.body.length?z:1===a.body.length?f[0]:function(a,b){var c;q(f,function(d){c=d(a,b)});return c};c&&(a.assign=function(a,b,d){return c(a,d,b)});e&&(a.inputs=e);return a},recurse:function(a,b,d){var c,e,f=this,g;if(a.input)return this.inputs(a.input,a.watchId);switch(a.type){case s.Literal:return this.value(a.value,b);case s.UnaryExpression:return e=this.recurse(a.argument),this["unary"+a.operator](e,b);case s.BinaryExpression:return c=this.recurse(a.left),e=this.recurse(a.right),this["binary"+
+a.operator](c,e,b);case s.LogicalExpression:return c=this.recurse(a.left),e=this.recurse(a.right),this["binary"+a.operator](c,e,b);case s.ConditionalExpression:return this["ternary?:"](this.recurse(a.test),this.recurse(a.alternate),this.recurse(a.consequent),b);case s.Identifier:return f.identifier(a.name,b,d);case s.MemberExpression:return c=this.recurse(a.object,!1,!!d),a.computed||(e=a.property.name),a.computed&&(e=this.recurse(a.property)),a.computed?this.computedMember(c,e,b,d):this.nonComputedMember(c,
+e,b,d);case s.CallExpression:return g=[],q(a.arguments,function(a){g.push(f.recurse(a))}),a.filter&&(e=this.$filter(a.callee.name)),a.filter||(e=this.recurse(a.callee,!0)),a.filter?function(a,c,d,f){for(var n=[],p=0;p<g.length;++p)n.push(g[p](a,c,d,f));a=e.apply(void 0,n,f);return b?{context:void 0,name:void 0,value:a}:a}:function(a,c,d,f){var n=e(a,c,d,f),p;if(null!=n.value){p=[];for(var r=0;r<g.length;++r)p.push(g[r](a,c,d,f));p=n.value.apply(n.context,p)}return b?{value:p}:p};case s.AssignmentExpression:return c=
+this.recurse(a.left,!0,1),e=this.recurse(a.right),function(a,d,f,g){var n=c(a,d,f,g);a=e(a,d,f,g);n.context[n.name]=a;return b?{value:a}:a};case s.ArrayExpression:return g=[],q(a.elements,function(a){g.push(f.recurse(a))}),function(a,c,d,e){for(var f=[],p=0;p<g.length;++p)f.push(g[p](a,c,d,e));return b?{value:f}:f};case s.ObjectExpression:return g=[],q(a.properties,function(a){a.computed?g.push({key:f.recurse(a.key),computed:!0,value:f.recurse(a.value)}):g.push({key:a.key.type===s.Identifier?a.key.name:
+""+a.key.value,computed:!1,value:f.recurse(a.value)})}),function(a,c,d,e){for(var f={},p=0;p<g.length;++p)g[p].computed?f[g[p].key(a,c,d,e)]=g[p].value(a,c,d,e):f[g[p].key]=g[p].value(a,c,d,e);return b?{value:f}:f};case s.ThisExpression:return function(a){return b?{value:a}:a};case s.LocalsExpression:return function(a,c){return b?{value:c}:c};case s.NGValueParameter:return function(a,c,d){return b?{value:d}:d}}},"unary+":function(a,b){return function(d,c,e,f){d=a(d,c,e,f);d=u(d)?+d:0;return b?{value:d}:
+d}},"unary-":function(a,b){return function(d,c,e,f){d=a(d,c,e,f);d=u(d)?-d:-0;return b?{value:d}:d}},"unary!":function(a,b){return function(d,c,e,f){d=!a(d,c,e,f);return b?{value:d}:d}},"binary+":function(a,b,d){return function(c,e,f,g){var h=a(c,e,f,g);c=b(c,e,f,g);h=Dd(h,c);return d?{value:h}:h}},"binary-":function(a,b,d){return function(c,e,f,g){var h=a(c,e,f,g);c=b(c,e,f,g);h=(u(h)?h:0)-(u(c)?c:0);return d?{value:h}:h}},"binary*":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)*b(c,e,f,g);
+return d?{value:c}:c}},"binary/":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)/b(c,e,f,g);return d?{value:c}:c}},"binary%":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)%b(c,e,f,g);return d?{value:c}:c}},"binary===":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)===b(c,e,f,g);return d?{value:c}:c}},"binary!==":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)!==b(c,e,f,g);return d?{value:c}:c}},"binary==":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)==b(c,e,f,g);return d?
+{value:c}:c}},"binary!=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)!=b(c,e,f,g);return d?{value:c}:c}},"binary<":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)<b(c,e,f,g);return d?{value:c}:c}},"binary>":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)>b(c,e,f,g);return d?{value:c}:c}},"binary<=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)<=b(c,e,f,g);return d?{value:c}:c}},"binary>=":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)>=b(c,e,f,g);return d?{value:c}:
+c}},"binary&&":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)&&b(c,e,f,g);return d?{value:c}:c}},"binary||":function(a,b,d){return function(c,e,f,g){c=a(c,e,f,g)||b(c,e,f,g);return d?{value:c}:c}},"ternary?:":function(a,b,d,c){return function(e,f,g,h){e=a(e,f,g,h)?b(e,f,g,h):d(e,f,g,h);return c?{value:e}:e}},value:function(a,b){return function(){return b?{context:void 0,name:void 0,value:a}:a}},identifier:function(a,b,d){return function(c,e,f,g){c=e&&a in e?e:c;d&&1!==d&&c&&null==c[a]&&(c[a]=
+{});e=c?c[a]:void 0;return b?{context:c,name:a,value:e}:e}},computedMember:function(a,b,d,c){return function(e,f,g,h){var k=a(e,f,g,h),l,m;null!=k&&(l=b(e,f,g,h),l+="",c&&1!==c&&k&&!k[l]&&(k[l]={}),m=k[l]);return d?{context:k,name:l,value:m}:m}},nonComputedMember:function(a,b,d,c){return function(e,f,g,h){e=a(e,f,g,h);c&&1!==c&&e&&null==e[b]&&(e[b]={});f=null!=e?e[b]:void 0;return d?{context:e,name:b,value:f}:f}},inputs:function(a,b){return function(d,c,e,f){return f?f[b]:a(d,c,e)}}};uc.prototype=
+{constructor:uc,parse:function(a){a=this.ast.ast(a);var b=this.astCompiler.compile(a);b.literal=0===a.body.length||1===a.body.length&&(a.body[0].expression.type===s.Literal||a.body[0].expression.type===s.ArrayExpression||a.body[0].expression.type===s.ObjectExpression);b.constant=a.constant;return b}};var ta=L("$sce"),oa={HTML:"html",CSS:"css",URL:"url",RESOURCE_URL:"resourceUrl",JS:"js"},xc=/_([a-z])/g,Dg=L("$compile"),aa=x.document.createElement("a"),Md=Ca(x.location.href);Nd.$inject=["$document"];
+cd.$inject=["$provide"];var Ud=22,Td=".",zc="0";Od.$inject=["$locale"];Qd.$inject=["$locale"];var Og={yyyy:Y("FullYear",4,0,!1,!0),yy:Y("FullYear",2,0,!0,!0),y:Y("FullYear",1,0,!1,!0),MMMM:mb("Month"),MMM:mb("Month",!0),MM:Y("Month",2,1),M:Y("Month",1,1),LLLL:mb("Month",!1,!0),dd:Y("Date",2),d:Y("Date",1),HH:Y("Hours",2),H:Y("Hours",1),hh:Y("Hours",2,-12),h:Y("Hours",1,-12),mm:Y("Minutes",2),m:Y("Minutes",1),ss:Y("Seconds",2),s:Y("Seconds",1),sss:Y("Milliseconds",3),EEEE:mb("Day"),EEE:mb("Day",!0),
+a:function(a,b){return 12>a.getHours()?b.AMPMS[0]:b.AMPMS[1]},Z:function(a,b,d){a=-1*d;return a=(0<=a?"+":"")+(Kb(Math[0<a?"floor":"ceil"](a/60),2)+Kb(Math.abs(a%60),2))},ww:Wd(2),w:Wd(1),G:Ac,GG:Ac,GGG:Ac,GGGG:function(a,b){return 0>=a.getFullYear()?b.ERANAMES[0]:b.ERANAMES[1]}},Ng=/((?:[^yMLdHhmsaZEwG']+)|(?:'(?:[^']|'')*')|(?:E+|y+|M+|L+|d+|H+|h+|m+|s+|a|Z|G+|w+))([\s\S]*)/,Mg=/^-?\d+$/;Pd.$inject=["$locale"];var Hg=la(Q),Ig=la(ub);Rd.$inject=["$parse"];var Fe=la({restrict:"E",compile:function(a,
+b){if(!b.href&&!b.xlinkHref)return function(a,b){if("a"===b[0].nodeName.toLowerCase()){var e="[object SVGAnimatedString]"===ma.call(b.prop("href"))?"xlink:href":"href";b.on("click",function(a){b.attr(e)||a.preventDefault()})}}}}),vb={};q(Fb,function(a,b){function d(a,d,e){a.$watch(e[c],function(a){e.$set(b,!!a)})}if("multiple"!==a){var c=Ba("ng-"+b),e=d;"checked"===a&&(e=function(a,b,e){e.ngModel!==e[c]&&d(a,b,e)});vb[c]=function(){return{restrict:"A",priority:100,link:e}}}});q(rd,function(a,b){vb[b]=
+function(){return{priority:100,link:function(a,c,e){if("ngPattern"===b&&"/"===e.ngPattern.charAt(0)&&(c=e.ngPattern.match(Sg))){e.$set("ngPattern",new RegExp(c[1],c[2]));return}a.$watch(e[b],function(a){e.$set(b,a)})}}}});q(["src","srcset","href"],function(a){var b=Ba("ng-"+a);vb[b]=function(){return{priority:99,link:function(d,c,e){var f=a,g=a;"href"===a&&"[object SVGAnimatedString]"===ma.call(c.prop("href"))&&(g="xlinkHref",e.$attr[g]="xlink:href",f=null);e.$observe(b,function(b){b?(e.$set(g,b),
+za&&f&&c.prop(f,e[g])):"href"===a&&e.$set(g,null)})}}}});var Mb={$addControl:z,$$renameControl:function(a,b){a.$name=b},$removeControl:z,$setValidity:z,$setDirty:z,$setPristine:z,$setSubmitted:z};Lb.$inject=["$element","$attrs","$scope","$animate","$interpolate"];Lb.prototype={$rollbackViewValue:function(){q(this.$$controls,function(a){a.$rollbackViewValue()})},$commitViewValue:function(){q(this.$$controls,function(a){a.$commitViewValue()})},$addControl:function(a){Ka(a.$name,"input");this.$$controls.push(a);
+a.$name&&(this[a.$name]=a);a.$$parentForm=this},$$renameControl:function(a,b){var d=a.$name;this[d]===a&&delete this[d];this[b]=a;a.$name=b},$removeControl:function(a){a.$name&&this[a.$name]===a&&delete this[a.$name];q(this.$pending,function(b,d){this.$setValidity(d,null,a)},this);q(this.$error,function(b,d){this.$setValidity(d,null,a)},this);q(this.$$success,function(b,d){this.$setValidity(d,null,a)},this);$a(this.$$controls,a);a.$$parentForm=Mb},$setDirty:function(){this.$$animate.removeClass(this.$$element,
+Va);this.$$animate.addClass(this.$$element,Rb);this.$dirty=!0;this.$pristine=!1;this.$$parentForm.$setDirty()},$setPristine:function(){this.$$animate.setClass(this.$$element,Va,Rb+" ng-submitted");this.$dirty=!1;this.$pristine=!0;this.$submitted=!1;q(this.$$controls,function(a){a.$setPristine()})},$setUntouched:function(){q(this.$$controls,function(a){a.$setUntouched()})},$setSubmitted:function(){this.$$animate.addClass(this.$$element,"ng-submitted");this.$submitted=!0;this.$$parentForm.$setSubmitted()}};
+Zd({clazz:Lb,set:function(a,b,d){var c=a[b];c?-1===c.indexOf(d)&&c.push(d):a[b]=[d]},unset:function(a,b,d){var c=a[b];c&&($a(c,d),0===c.length&&delete a[b])}});var ge=function(a){return["$timeout","$parse",function(b,d){function c(a){return""===a?d('this[""]').assign:d(a).assign||z}return{name:"form",restrict:a?"EAC":"E",require:["form","^^?form"],controller:Lb,compile:function(d,f){d.addClass(Va).addClass(nb);var g=f.name?"name":a&&f.ngForm?"ngForm":!1;return{pre:function(a,d,e,f){var n=f[0];if(!("action"in
+e)){var p=function(b){a.$apply(function(){n.$commitViewValue();n.$setSubmitted()});b.preventDefault()};d[0].addEventListener("submit",p);d.on("$destroy",function(){b(function(){d[0].removeEventListener("submit",p)},0,!1)})}(f[1]||n.$$parentForm).$addControl(n);var r=g?c(n.$name):z;g&&(r(a,n),e.$observe(g,function(b){n.$name!==b&&(r(a,void 0),n.$$parentForm.$$renameControl(n,b),r=c(n.$name),r(a,n))}));d.on("$destroy",function(){n.$$parentForm.$removeControl(n);r(a,void 0);S(n,Mb)})}}}}}]},Ge=ge(),
+Se=ge(!0),Pg=/^\d{4,}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+(?:[+-][0-2]\d:[0-5]\d|Z)$/,ah=/^[a-z][a-z\d.+-]*:\/*(?:[^:@]+(?::[^@]+)?@)?(?:[^\s:/?#]+|\[[a-f\d:]+])(?::\d+)?(?:\/[^?#]*)?(?:\?[^#]*)?(?:#.*)?$/i,bh=/^(?=.{1,254}$)(?=.{1,64}@)[-!#$%&'*+/0-9=?A-Z^_`a-z{|}~]+(\.[-!#$%&'*+/0-9=?A-Z^_`a-z{|}~]+)*@[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?(\.[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?)*$/,Qg=/^\s*(-|\+)?(\d+|(\d*(\.\d*)))([eE][+-]?\d+)?\s*$/,he=/^(\d{4,})-(\d{2})-(\d{2})$/,ie=/^(\d{4,})-(\d\d)-(\d\d)T(\d\d):(\d\d)(?::(\d\d)(\.\d{1,3})?)?$/,
+Hc=/^(\d{4,})-W(\d\d)$/,je=/^(\d{4,})-(\d\d)$/,ke=/^(\d\d):(\d\d)(?::(\d\d)(\.\d{1,3})?)?$/,ae=V();q(["date","datetime-local","month","time","week"],function(a){ae[a]=!0});var le={text:function(a,b,d,c,e,f){Ra(a,b,d,c,e,f);Cc(c)},date:ob("date",he,Nb(he,["yyyy","MM","dd"]),"yyyy-MM-dd"),"datetime-local":ob("datetimelocal",ie,Nb(ie,"yyyy MM dd HH mm ss sss".split(" ")),"yyyy-MM-ddTHH:mm:ss.sss"),time:ob("time",ke,Nb(ke,["HH","mm","ss","sss"]),"HH:mm:ss.sss"),week:ob("week",Hc,function(a,b){if(ga(a))return a;
+if(F(a)){Hc.lastIndex=0;var d=Hc.exec(a);if(d){var c=+d[1],e=+d[2],f=d=0,g=0,h=0,k=Vd(c),e=7*(e-1);b&&(d=b.getHours(),f=b.getMinutes(),g=b.getSeconds(),h=b.getMilliseconds());return new Date(c,0,k.getDate()+e,d,f,g,h)}}return NaN},"yyyy-Www"),month:ob("month",je,Nb(je,["yyyy","MM"]),"yyyy-MM"),number:function(a,b,d,c,e,f){Dc(a,b,d,c);be(c);Ra(a,b,d,c,e,f);var g,h;if(u(d.min)||d.ngMin)c.$validators.min=function(a){return c.$isEmpty(a)||w(g)||a>=g},d.$observe("min",function(a){g=Sa(a);c.$validate()});
+if(u(d.max)||d.ngMax)c.$validators.max=function(a){return c.$isEmpty(a)||w(h)||a<=h},d.$observe("max",function(a){h=Sa(a);c.$validate()});if(u(d.step)||d.ngStep){var k;c.$validators.step=function(a,b){return c.$isEmpty(b)||w(k)||ce(b,g||0,k)};d.$observe("step",function(a){k=Sa(a);c.$validate()})}},url:function(a,b,d,c,e,f){Ra(a,b,d,c,e,f);Cc(c);c.$$parserName="url";c.$validators.url=function(a,b){var d=a||b;return c.$isEmpty(d)||ah.test(d)}},email:function(a,b,d,c,e,f){Ra(a,b,d,c,e,f);Cc(c);c.$$parserName=
+"email";c.$validators.email=function(a,b){var d=a||b;return c.$isEmpty(d)||bh.test(d)}},radio:function(a,b,d,c){var e=!d.ngTrim||"false"!==T(d.ngTrim);w(d.name)&&b.attr("name",++qb);b.on("click",function(a){var g;b[0].checked&&(g=d.value,e&&(g=T(g)),c.$setViewValue(g,a&&a.type))});c.$render=function(){var a=d.value;e&&(a=T(a));b[0].checked=a===c.$viewValue};d.$observe("value",c.$render)},range:function(a,b,d,c,e,f){function g(a,c){b.attr(a,d[a]);d.$observe(a,c)}function h(a){n=Sa(a);da(c.$modelValue)||
+(m?(a=b.val(),n>a&&(a=n,b.val(a)),c.$setViewValue(a)):c.$validate())}function k(a){p=Sa(a);da(c.$modelValue)||(m?(a=b.val(),p<a&&(b.val(p),a=p<n?n:p),c.$setViewValue(a)):c.$validate())}function l(a){r=Sa(a);da(c.$modelValue)||(m&&c.$viewValue!==b.val()?c.$setViewValue(b.val()):c.$validate())}Dc(a,b,d,c);be(c);Ra(a,b,d,c,e,f);var m=c.$$hasNativeValidators&&"range"===b[0].type,n=m?0:void 0,p=m?100:void 0,r=m?1:void 0,q=b[0].validity;a=u(d.min);e=u(d.max);f=u(d.step);var s=c.$render;c.$render=m&&u(q.rangeUnderflow)&&
+u(q.rangeOverflow)?function(){s();c.$setViewValue(b.val())}:s;a&&(c.$validators.min=m?function(){return!0}:function(a,b){return c.$isEmpty(b)||w(n)||b>=n},g("min",h));e&&(c.$validators.max=m?function(){return!0}:function(a,b){return c.$isEmpty(b)||w(p)||b<=p},g("max",k));f&&(c.$validators.step=m?function(){return!q.stepMismatch}:function(a,b){return c.$isEmpty(b)||w(r)||ce(b,n||0,r)},g("step",l))},checkbox:function(a,b,d,c,e,f,g,h){var k=de(h,a,"ngTrueValue",d.ngTrueValue,!0),l=de(h,a,"ngFalseValue",
+d.ngFalseValue,!1);b.on("click",function(a){c.$setViewValue(b[0].checked,a&&a.type)});c.$render=function(){b[0].checked=c.$viewValue};c.$isEmpty=function(a){return!1===a};c.$formatters.push(function(a){return sa(a,k)});c.$parsers.push(function(a){return a?k:l})},hidden:z,button:z,submit:z,reset:z,file:z},Xc=["$browser","$sniffer","$filter","$parse",function(a,b,d,c){return{restrict:"E",require:["?ngModel"],link:{pre:function(e,f,g,h){h[0]&&(le[Q(g.type)]||le.text)(e,f,g,h[0],b,a,d,c)}}}}],ch=/^(true|false|\d+)$/,
+kf=function(){function a(a,d,c){var e=u(c)?c:9===za?"":null;a.prop("value",e);d.$set("value",c)}return{restrict:"A",priority:100,compile:function(b,d){return ch.test(d.ngValue)?function(b,d,f){b=b.$eval(f.ngValue);a(d,f,b)}:function(b,d,f){b.$watch(f.ngValue,function(b){a(d,f,b)})}}}},Ke=["$compile",function(a){return{restrict:"AC",compile:function(b){a.$$addBindingClass(b);return function(b,c,e){a.$$addBindingInfo(c,e.ngBind);c=c[0];b.$watch(e.ngBind,function(a){c.textContent=$b(a)})}}}}],Me=["$interpolate",
+"$compile",function(a,b){return{compile:function(d){b.$$addBindingClass(d);return function(c,d,f){c=a(d.attr(f.$attr.ngBindTemplate));b.$$addBindingInfo(d,c.expressions);d=d[0];f.$observe("ngBindTemplate",function(a){d.textContent=w(a)?"":a})}}}}],Le=["$sce","$parse","$compile",function(a,b,d){return{restrict:"A",compile:function(c,e){var f=b(e.ngBindHtml),g=b(e.ngBindHtml,function(b){return a.valueOf(b)});d.$$addBindingClass(c);return function(b,c,e){d.$$addBindingInfo(c,e.ngBindHtml);b.$watch(g,
+function(){var d=f(b);c.html(a.getTrustedHtml(d)||"")})}}}}],jf=la({restrict:"A",require:"ngModel",link:function(a,b,d,c){c.$viewChangeListeners.push(function(){a.$eval(d.ngChange)})}}),Ne=Fc("",!0),Pe=Fc("Odd",0),Oe=Fc("Even",1),Qe=Qa({compile:function(a,b){b.$set("ngCloak",void 0);a.removeClass("ng-cloak")}}),Re=[function(){return{restrict:"A",scope:!0,controller:"@",priority:500}}],bd={},dh={blur:!0,focus:!0};q("click dblclick mousedown mouseup mouseover mouseout mousemove mouseenter mouseleave keydown keyup keypress submit focus blur copy cut paste".split(" "),
+function(a){var b=Ba("ng-"+a);bd[b]=["$parse","$rootScope",function(d,c){return{restrict:"A",compile:function(e,f){var g=d(f[b]);return function(b,d){d.on(a,function(d){var e=function(){g(b,{$event:d})};dh[a]&&c.$$phase?b.$evalAsync(e):b.$apply(e)})}}}}]});var Ue=["$animate","$compile",function(a,b){return{multiElement:!0,transclude:"element",priority:600,terminal:!0,restrict:"A",$$tlb:!0,link:function(d,c,e,f,g){var h,k,l;d.$watch(e.ngIf,function(d){d?k||g(function(d,f){k=f;d[d.length++]=b.$$createComment("end ngIf",
+e.ngIf);h={clone:d};a.enter(d,c.parent(),c)}):(l&&(l.remove(),l=null),k&&(k.$destroy(),k=null),h&&(l=tb(h.clone),a.leave(l).done(function(a){!1!==a&&(l=null)}),h=null))})}}}],Ve=["$templateRequest","$anchorScroll","$animate",function(a,b,d){return{restrict:"ECA",priority:400,terminal:!0,transclude:"element",controller:ea.noop,compile:function(c,e){var f=e.ngInclude||e.src,g=e.onload||"",h=e.autoscroll;return function(c,e,m,n,p){var r=0,q,s,t,w=function(){s&&(s.remove(),s=null);q&&(q.$destroy(),q=
+null);t&&(d.leave(t).done(function(a){!1!==a&&(s=null)}),s=t,t=null)};c.$watch(f,function(f){var m=function(a){!1===a||!u(h)||h&&!c.$eval(h)||b()},s=++r;f?(a(f,!0).then(function(a){if(!c.$$destroyed&&s===r){var b=c.$new();n.template=a;a=p(b,function(a){w();d.enter(a,null,e).done(m)});q=b;t=a;q.$emit("$includeContentLoaded",f);c.$eval(g)}},function(){c.$$destroyed||s!==r||(w(),c.$emit("$includeContentError",f))}),c.$emit("$includeContentRequested",f)):(w(),n.template=null)})}}}}],mf=["$compile",function(a){return{restrict:"ECA",
+priority:-400,require:"ngInclude",link:function(b,d,c,e){ma.call(d[0]).match(/SVG/)?(d.empty(),a(dd(e.template,x.document).childNodes)(b,function(a){d.append(a)},{futureParentElement:d})):(d.html(e.template),a(d.contents())(b))}}}],We=Qa({priority:450,compile:function(){return{pre:function(a,b,d){a.$eval(d.ngInit)}}}}),hf=function(){return{restrict:"A",priority:100,require:"ngModel",link:function(a,b,d,c){var e=d.ngList||", ",f="false"!==d.ngTrim,g=f?T(e):e;c.$parsers.push(function(a){if(!w(a)){var b=
+[];a&&q(a.split(g),function(a){a&&b.push(f?T(a):a)});return b}});c.$formatters.push(function(a){if(H(a))return a.join(e)});c.$isEmpty=function(a){return!a||!a.length}}}},nb="ng-valid",Yd="ng-invalid",Va="ng-pristine",Rb="ng-dirty",pb=L("ngModel");Ob.$inject="$scope $exceptionHandler $attrs $element $parse $animate $timeout $q $interpolate".split(" ");Ob.prototype={$$initGetterSetters:function(){if(this.$options.getOption("getterSetter")){var a=this.$$parse(this.$$attr.ngModel+"()"),b=this.$$parse(this.$$attr.ngModel+
+"($$$p)");this.$$ngModelGet=function(b){var c=this.$$parsedNgModel(b);D(c)&&(c=a(b));return c};this.$$ngModelSet=function(a,c){D(this.$$parsedNgModel(a))?b(a,{$$$p:c}):this.$$parsedNgModelAssign(a,c)}}else if(!this.$$parsedNgModel.assign)throw pb("nonassign",this.$$attr.ngModel,xa(this.$$element));},$render:z,$isEmpty:function(a){return w(a)||""===a||null===a||a!==a},$$updateEmptyClasses:function(a){this.$isEmpty(a)?(this.$$animate.removeClass(this.$$element,"ng-not-empty"),this.$$animate.addClass(this.$$element,
+"ng-empty")):(this.$$animate.removeClass(this.$$element,"ng-empty"),this.$$animate.addClass(this.$$element,"ng-not-empty"))},$setPristine:function(){this.$dirty=!1;this.$pristine=!0;this.$$animate.removeClass(this.$$element,Rb);this.$$animate.addClass(this.$$element,Va)},$setDirty:function(){this.$dirty=!0;this.$pristine=!1;this.$$animate.removeClass(this.$$element,Va);this.$$animate.addClass(this.$$element,Rb);this.$$parentForm.$setDirty()},$setUntouched:function(){this.$touched=!1;this.$untouched=
+!0;this.$$animate.setClass(this.$$element,"ng-untouched","ng-touched")},$setTouched:function(){this.$touched=!0;this.$untouched=!1;this.$$animate.setClass(this.$$element,"ng-touched","ng-untouched")},$rollbackViewValue:function(){this.$$timeout.cancel(this.$$pendingDebounce);this.$viewValue=this.$$lastCommittedViewValue;this.$render()},$validate:function(){if(!da(this.$modelValue)){var a=this.$$lastCommittedViewValue,b=this.$$rawModelValue,d=this.$valid,c=this.$modelValue,e=this.$options.getOption("allowInvalid"),
+f=this;this.$$runValidators(b,a,function(a){e||d===a||(f.$modelValue=a?b:void 0,f.$modelValue!==c&&f.$$writeModelToScope())})}},$$runValidators:function(a,b,d){function c(){var c=!0;q(k.$validators,function(d,e){var g=Boolean(d(a,b));c=c&&g;f(e,g)});return c?!0:(q(k.$asyncValidators,function(a,b){f(b,null)}),!1)}function e(){var c=[],d=!0;q(k.$asyncValidators,function(e,g){var k=e(a,b);if(!k||!D(k.then))throw pb("nopromise",k);f(g,void 0);c.push(k.then(function(){f(g,!0)},function(){d=!1;f(g,!1)}))});
+c.length?k.$$q.all(c).then(function(){g(d)},z):g(!0)}function f(a,b){h===k.$$currentValidationRunId&&k.$setValidity(a,b)}function g(a){h===k.$$currentValidationRunId&&d(a)}this.$$currentValidationRunId++;var h=this.$$currentValidationRunId,k=this;(function(){var a=k.$$parserName||"parse";if(w(k.$$parserValid))f(a,null);else return k.$$parserValid||(q(k.$validators,function(a,b){f(b,null)}),q(k.$asyncValidators,function(a,b){f(b,null)})),f(a,k.$$parserValid),k.$$parserValid;return!0})()?c()?e():g(!1):
+g(!1)},$commitViewValue:function(){var a=this.$viewValue;this.$$timeout.cancel(this.$$pendingDebounce);if(this.$$lastCommittedViewValue!==a||""===a&&this.$$hasNativeValidators)this.$$updateEmptyClasses(a),this.$$lastCommittedViewValue=a,this.$pristine&&this.$setDirty(),this.$$parseAndValidate()},$$parseAndValidate:function(){var a=this.$$lastCommittedViewValue,b=this;if(this.$$parserValid=w(a)?void 0:!0)for(var d=0;d<this.$parsers.length;d++)if(a=this.$parsers[d](a),w(a)){this.$$parserValid=!1;break}da(this.$modelValue)&&
+(this.$modelValue=this.$$ngModelGet(this.$$scope));var c=this.$modelValue,e=this.$options.getOption("allowInvalid");this.$$rawModelValue=a;e&&(this.$modelValue=a,b.$modelValue!==c&&b.$$writeModelToScope());this.$$runValidators(a,this.$$lastCommittedViewValue,function(d){e||(b.$modelValue=d?a:void 0,b.$modelValue!==c&&b.$$writeModelToScope())})},$$writeModelToScope:function(){this.$$ngModelSet(this.$$scope,this.$modelValue);q(this.$viewChangeListeners,function(a){try{a()}catch(b){this.$$exceptionHandler(b)}},
+this)},$setViewValue:function(a,b){this.$viewValue=a;this.$options.getOption("updateOnDefault")&&this.$$debounceViewValueCommit(b)},$$debounceViewValueCommit:function(a){var b=this.$options.getOption("debounce");ba(b[a])?b=b[a]:ba(b["default"])&&(b=b["default"]);this.$$timeout.cancel(this.$$pendingDebounce);var d=this;0<b?this.$$pendingDebounce=this.$$timeout(function(){d.$commitViewValue()},b):this.$$scope.$root.$$phase?this.$commitViewValue():this.$$scope.$apply(function(){d.$commitViewValue()})},
+$overrideModelOptions:function(a){this.$options=this.$options.createChild(a)}};Zd({clazz:Ob,set:function(a,b){a[b]=!0},unset:function(a,b){delete a[b]}});var gf=["$rootScope",function(a){return{restrict:"A",require:["ngModel","^?form","^?ngModelOptions"],controller:Ob,priority:1,compile:function(b){b.addClass(Va).addClass("ng-untouched").addClass(nb);return{pre:function(a,b,e,f){var g=f[0];b=f[1]||g.$$parentForm;if(f=f[2])g.$options=f.$options;g.$$initGetterSetters();b.$addControl(g);e.$observe("name",
+function(a){g.$name!==a&&g.$$parentForm.$$renameControl(g,a)});a.$on("$destroy",function(){g.$$parentForm.$removeControl(g)})},post:function(b,c,e,f){function g(){h.$setTouched()}var h=f[0];if(h.$options.getOption("updateOn"))c.on(h.$options.getOption("updateOn"),function(a){h.$$debounceViewValueCommit(a&&a.type)});c.on("blur",function(){h.$touched||(a.$$phase?b.$evalAsync(g):b.$apply(g))})}}}}}],Pb,eh=/(\s+|^)default(\s+|$)/;Gc.prototype={getOption:function(a){return this.$$options[a]},createChild:function(a){var b=
+!1;a=S({},a);q(a,function(d,c){"$inherit"===d?"*"===c?b=!0:(a[c]=this.$$options[c],"updateOn"===c&&(a.updateOnDefault=this.$$options.updateOnDefault)):"updateOn"===c&&(a.updateOnDefault=!1,a[c]=T(d.replace(eh,function(){a.updateOnDefault=!0;return" "})))},this);b&&(delete a["*"],ee(a,this.$$options));ee(a,Pb.$$options);return new Gc(a)}};Pb=new Gc({updateOn:"",updateOnDefault:!0,debounce:0,getterSetter:!1,allowInvalid:!1,timezone:null});var lf=function(){function a(a,d){this.$$attrs=a;this.$$scope=
+d}a.$inject=["$attrs","$scope"];a.prototype={$onInit:function(){var a=this.parentCtrl?this.parentCtrl.$options:Pb,d=this.$$scope.$eval(this.$$attrs.ngModelOptions);this.$options=a.createChild(d)}};return{restrict:"A",priority:10,require:{parentCtrl:"?^^ngModelOptions"},bindToController:!0,controller:a}},Xe=Qa({terminal:!0,priority:1E3}),fh=L("ngOptions"),gh=/^\s*([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+group\s+by\s+([\s\S]+?))?(?:\s+disable\s+when\s+([\s\S]+?))?\s+for\s+(?:([$\w][$\w]*)|(?:\(\s*([$\w][$\w]*)\s*,\s*([$\w][$\w]*)\s*\)))\s+in\s+([\s\S]+?)(?:\s+track\s+by\s+([\s\S]+?))?$/,
+ef=["$compile","$document","$parse",function(a,b,d){function c(a,b,c){function e(a,b,c,d,f){this.selectValue=a;this.viewValue=b;this.label=c;this.group=d;this.disabled=f}function f(a){var b;if(!q&&qa(a))b=a;else{b=[];for(var c in a)a.hasOwnProperty(c)&&"$"!==c.charAt(0)&&b.push(c)}return b}var n=a.match(gh);if(!n)throw fh("iexp",a,xa(b));var p=n[5]||n[7],q=n[6];a=/ as /.test(n[0])&&n[1];var s=n[9];b=d(n[2]?n[1]:p);var v=a&&d(a)||b,t=s&&d(s),u=s?function(a,b){return t(c,b)}:function(a){return Pa(a)},
+w=function(a,b){return u(a,G(a,b))},A=d(n[2]||n[1]),x=d(n[3]||""),I=d(n[4]||""),K=d(n[8]),E={},G=q?function(a,b){E[q]=b;E[p]=a;return E}:function(a){E[p]=a;return E};return{trackBy:s,getTrackByValue:w,getWatchables:d(K,function(a){var b=[];a=a||[];for(var d=f(a),e=d.length,g=0;g<e;g++){var h=a===d?g:d[g],l=a[h],h=G(l,h),l=u(l,h);b.push(l);if(n[2]||n[1])l=A(c,h),b.push(l);n[4]&&(h=I(c,h),b.push(h))}return b}),getOptions:function(){for(var a=[],b={},d=K(c)||[],g=f(d),h=g.length,n=0;n<h;n++){var p=d===
+g?n:g[n],q=G(d[p],p),r=v(c,q),p=u(r,q),t=A(c,q),E=x(c,q),q=I(c,q),r=new e(p,r,t,E,q);a.push(r);b[p]=r}return{items:a,selectValueMap:b,getOptionFromViewValue:function(a){return b[w(a)]},getViewValueFromOption:function(a){return s?ra(a.viewValue):a.viewValue}}}}}var e=x.document.createElement("option"),f=x.document.createElement("optgroup");return{restrict:"A",terminal:!0,require:["select","ngModel"],link:{pre:function(a,b,c,d){d[0].registerOption=z},post:function(d,h,k,l){function m(a){var b=(a=A.getOptionFromViewValue(a))&&
+a.element;b&&!b.selected&&(b.selected=!0);return a}function n(a,b){a.element=b;b.disabled=a.disabled;a.label!==b.label&&(b.label=a.label,b.textContent=a.label);b.value=a.selectValue}function p(){var a=A&&r.readValue();if(A)for(var b=A.items.length-1;0<=b;b--){var c=A.items[b];u(c.group)?Eb(c.element.parentNode):Eb(c.element)}A=z.getOptions();var d={};x&&h.prepend(r.emptyOption);A.items.forEach(function(a){var b;if(u(a.group)){b=d[a.group];b||(b=f.cloneNode(!1),I.appendChild(b),b.label=null===a.group?
+"null":a.group,d[a.group]=b);var c=e.cloneNode(!1)}else b=I,c=e.cloneNode(!1);b.appendChild(c);n(a,c)});h[0].appendChild(I);s.$render();s.$isEmpty(a)||(b=r.readValue(),(z.trackBy||v?sa(a,b):a===b)||(s.$setViewValue(b),s.$render()))}var r=l[0],s=l[1],v=k.multiple;l=0;for(var t=h.children(),w=t.length;l<w;l++)if(""===t[l].value){r.hasEmptyOption=!0;r.emptyOption=t.eq(l);break}var x=!!r.emptyOption;B(e.cloneNode(!1)).val("?");var A,z=c(k.ngOptions,h,d),I=b[0].createDocumentFragment();r.generateUnknownOptionValue=
+function(a){return"?"};v?(r.writeValue=function(a){var b=a&&a.map(m)||[];A.items.forEach(function(a){a.element.selected&&-1===Array.prototype.indexOf.call(b,a)&&(a.element.selected=!1)})},r.readValue=function(){var a=h.val()||[],b=[];q(a,function(a){(a=A.selectValueMap[a])&&!a.disabled&&b.push(A.getViewValueFromOption(a))});return b},z.trackBy&&d.$watchCollection(function(){if(H(s.$viewValue))return s.$viewValue.map(function(a){return z.getTrackByValue(a)})},function(){s.$render()})):(r.writeValue=
+function(a){var b=A.selectValueMap[h.val()],c=A.getOptionFromViewValue(a);b&&b.element.removeAttribute("selected");c?(h[0].value!==c.selectValue&&(r.removeUnknownOption(),r.unselectEmptyOption(),h[0].value=c.selectValue,c.element.selected=!0),c.element.setAttribute("selected","selected")):x?r.selectEmptyOption():r.unknownOption.parent().length?r.updateUnknownOption(a):r.renderUnknownOption(a)},r.readValue=function(){var a=A.selectValueMap[h.val()];return a&&!a.disabled?(r.unselectEmptyOption(),r.removeUnknownOption(),
+A.getViewValueFromOption(a)):null},z.trackBy&&d.$watch(function(){return z.getTrackByValue(s.$viewValue)},function(){s.$render()}));x&&(r.emptyOption.remove(),a(r.emptyOption)(d),8===r.emptyOption[0].nodeType?(r.hasEmptyOption=!1,r.registerOption=function(a,b){""===b.val()&&(r.hasEmptyOption=!0,r.emptyOption=b,r.emptyOption.removeClass("ng-scope"),s.$render(),b.on("$destroy",function(){r.hasEmptyOption=!1;r.emptyOption=void 0}))}):r.emptyOption.removeClass("ng-scope"));h.empty();p();d.$watchCollection(z.getWatchables,
+p)}}}}],Ye=["$locale","$interpolate","$log",function(a,b,d){var c=/{}/g,e=/^when(Minus)?(.+)$/;return{link:function(f,g,h){function k(a){g.text(a||"")}var l=h.count,m=h.$attr.when&&g.attr(h.$attr.when),n=h.offset||0,p=f.$eval(m)||{},r={},s=b.startSymbol(),v=b.endSymbol(),t=s+l+"-"+n+v,u=ea.noop,x;q(h,function(a,b){var c=e.exec(b);c&&(c=(c[1]?"-":"")+Q(c[2]),p[c]=g.attr(h.$attr[b]))});q(p,function(a,d){r[d]=b(a.replace(c,t))});f.$watch(l,function(b){var c=parseFloat(b),e=da(c);e||c in p||(c=a.pluralCat(c-
+n));c===x||e&&da(x)||(u(),e=r[c],w(e)?(null!=b&&d.debug("ngPluralize: no rule defined for '"+c+"' in "+m),u=z,k()):u=f.$watch(e,k),x=c)})}}}],Ze=["$parse","$animate","$compile",function(a,b,d){var c=L("ngRepeat"),e=function(a,b,c,d,e,m,n){a[c]=d;e&&(a[e]=m);a.$index=b;a.$first=0===b;a.$last=b===n-1;a.$middle=!(a.$first||a.$last);a.$odd=!(a.$even=0===(b&1))};return{restrict:"A",multiElement:!0,transclude:"element",priority:1E3,terminal:!0,$$tlb:!0,compile:function(f,g){var h=g.ngRepeat,k=d.$$createComment("end ngRepeat",
+h),l=h.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+track\s+by\s+([\s\S]+?))?\s*$/);if(!l)throw c("iexp",h);var m=l[1],n=l[2],p=l[3],r=l[4],l=m.match(/^(?:(\s*[$\w]+)|\(\s*([$\w]+)\s*,\s*([$\w]+)\s*\))$/);if(!l)throw c("iidexp",m);var s=l[3]||l[1],v=l[2];if(p&&(!/^[$a-zA-Z_][$a-zA-Z0-9_]*$/.test(p)||/^(null|undefined|this|\$index|\$first|\$middle|\$last|\$even|\$odd|\$parent|\$root|\$id)$/.test(p)))throw c("badident",p);var t,u,w,x,z={$id:Pa};r?t=a(r):(w=function(a,b){return Pa(b)},
+x=function(a){return a});return function(a,d,f,g,l){t&&(u=function(b,c,d){v&&(z[v]=b);z[s]=c;z.$index=d;return t(a,z)});var m=V();a.$watchCollection(n,function(f){var g,n,r=d[0],t,z=V(),B,D,F,C,G,E,H;p&&(a[p]=f);if(qa(f))G=f,n=u||w;else for(H in n=u||x,G=[],f)ua.call(f,H)&&"$"!==H.charAt(0)&&G.push(H);B=G.length;H=Array(B);for(g=0;g<B;g++)if(D=f===G?g:G[g],F=f[D],C=n(D,F,g),m[C])E=m[C],delete m[C],z[C]=E,H[g]=E;else{if(z[C])throw q(H,function(a){a&&a.scope&&(m[a.id]=a)}),c("dupes",h,C,F);H[g]={id:C,
+scope:void 0,clone:void 0};z[C]=!0}for(t in m){E=m[t];C=tb(E.clone);b.leave(C);if(C[0].parentNode)for(g=0,n=C.length;g<n;g++)C[g].$$NG_REMOVED=!0;E.scope.$destroy()}for(g=0;g<B;g++)if(D=f===G?g:G[g],F=f[D],E=H[g],E.scope){t=r;do t=t.nextSibling;while(t&&t.$$NG_REMOVED);E.clone[0]!==t&&b.move(tb(E.clone),null,r);r=E.clone[E.clone.length-1];e(E.scope,g,s,F,v,D,B)}else l(function(a,c){E.scope=c;var d=k.cloneNode(!1);a[a.length++]=d;b.enter(a,null,r);r=d;E.clone=a;z[E.id]=E;e(E.scope,g,s,F,v,D,B)});m=
+z})}}}}],$e=["$animate",function(a){return{restrict:"A",multiElement:!0,link:function(b,d,c){b.$watch(c.ngShow,function(b){a[b?"removeClass":"addClass"](d,"ng-hide",{tempClasses:"ng-hide-animate"})})}}}],Te=["$animate",function(a){return{restrict:"A",multiElement:!0,link:function(b,d,c){b.$watch(c.ngHide,function(b){a[b?"addClass":"removeClass"](d,"ng-hide",{tempClasses:"ng-hide-animate"})})}}}],af=Qa(function(a,b,d){a.$watch(d.ngStyle,function(a,d){d&&a!==d&&q(d,function(a,c){b.css(c,"")});a&&b.css(a)},
+!0)}),bf=["$animate","$compile",function(a,b){return{require:"ngSwitch",controller:["$scope",function(){this.cases={}}],link:function(d,c,e,f){var g=[],h=[],k=[],l=[],m=function(a,b){return function(c){!1!==c&&a.splice(b,1)}};d.$watch(e.ngSwitch||e.on,function(c){for(var d,e;k.length;)a.cancel(k.pop());d=0;for(e=l.length;d<e;++d){var s=tb(h[d].clone);l[d].$destroy();(k[d]=a.leave(s)).done(m(k,d))}h.length=0;l.length=0;(g=f.cases["!"+c]||f.cases["?"])&&q(g,function(c){c.transclude(function(d,e){l.push(e);
+var f=c.element;d[d.length++]=b.$$createComment("end ngSwitchWhen");h.push({clone:d});a.enter(d,f.parent(),f)})})})}}}],cf=Qa({transclude:"element",priority:1200,require:"^ngSwitch",multiElement:!0,link:function(a,b,d,c,e){a=d.ngSwitchWhen.split(d.ngSwitchWhenSeparator).sort().filter(function(a,b,c){return c[b-1]!==a});q(a,function(a){c.cases["!"+a]=c.cases["!"+a]||[];c.cases["!"+a].push({transclude:e,element:b})})}}),df=Qa({transclude:"element",priority:1200,require:"^ngSwitch",multiElement:!0,link:function(a,
+b,d,c,e){c.cases["?"]=c.cases["?"]||[];c.cases["?"].push({transclude:e,element:b})}}),hh=L("ngTransclude"),ff=["$compile",function(a){return{restrict:"EAC",terminal:!0,compile:function(b){var d=a(b.contents());b.empty();return function(a,b,f,g,h){function k(){d(a,function(a){b.append(a)})}if(!h)throw hh("orphan",xa(b));f.ngTransclude===f.$attr.ngTransclude&&(f.ngTransclude="");f=f.ngTransclude||f.ngTranscludeSlot;h(function(a,c){var d;if(d=a.length)a:{d=0;for(var f=a.length;d<f;d++){var g=a[d];if(g.nodeType!==
+Ia||g.nodeValue.trim()){d=!0;break a}}d=void 0}d?b.append(a):(k(),c.$destroy())},null,f);f&&!h.isSlotFilled(f)&&k()}}}}],He=["$templateCache",function(a){return{restrict:"E",terminal:!0,compile:function(b,d){"text/ng-template"===d.type&&a.put(d.id,b[0].text)}}}],ih={$setViewValue:z,$render:z},jh=["$element","$scope",function(a,b){function d(){g||(g=!0,b.$$postDigest(function(){g=!1;e.ngModelCtrl.$render()}))}function c(a){h||(h=!0,b.$$postDigest(function(){b.$$destroyed||(h=!1,e.ngModelCtrl.$setViewValue(e.readValue()),
+a&&e.ngModelCtrl.$render())}))}var e=this,f=new Gb;e.selectValueMap={};e.ngModelCtrl=ih;e.multiple=!1;e.unknownOption=B(x.document.createElement("option"));e.hasEmptyOption=!1;e.emptyOption=void 0;e.renderUnknownOption=function(b){b=e.generateUnknownOptionValue(b);e.unknownOption.val(b);a.prepend(e.unknownOption);Ta(e.unknownOption,!0);a.val(b)};e.updateUnknownOption=function(b){b=e.generateUnknownOptionValue(b);e.unknownOption.val(b);Ta(e.unknownOption,!0);a.val(b)};e.generateUnknownOptionValue=
+function(a){return"? "+Pa(a)+" ?"};e.removeUnknownOption=function(){e.unknownOption.parent()&&e.unknownOption.remove()};e.selectEmptyOption=function(){e.emptyOption&&(a.val(""),Ta(e.emptyOption,!0))};e.unselectEmptyOption=function(){e.hasEmptyOption&&e.emptyOption.removeAttr("selected")};b.$on("$destroy",function(){e.renderUnknownOption=z});e.readValue=function(){var b=a.val(),b=b in e.selectValueMap?e.selectValueMap[b]:b;return e.hasOption(b)?b:null};e.writeValue=function(b){var c=a[0].options[a[0].selectedIndex];
+c&&Ta(B(c),!1);e.hasOption(b)?(e.removeUnknownOption(),c=Pa(b),a.val(c in e.selectValueMap?c:b),Ta(B(a[0].options[a[0].selectedIndex]),!0)):null==b&&e.emptyOption?(e.removeUnknownOption(),e.selectEmptyOption()):e.unknownOption.parent().length?e.updateUnknownOption(b):e.renderUnknownOption(b)};e.addOption=function(a,b){if(8!==b[0].nodeType){Ka(a,'"option value"');""===a&&(e.hasEmptyOption=!0,e.emptyOption=b);var c=f.get(a)||0;f.set(a,c+1);d()}};e.removeOption=function(a){var b=f.get(a);b&&(1===b?(f.delete(a),
+""===a&&(e.hasEmptyOption=!1,e.emptyOption=void 0)):f.set(a,b-1))};e.hasOption=function(a){return!!f.get(a)};var g=!1,h=!1;e.registerOption=function(a,b,f,g,h){if(f.$attr.ngValue){var q,s=NaN;f.$observe("value",function(a){var d,f=b.prop("selected");u(s)&&(e.removeOption(q),delete e.selectValueMap[s],d=!0);s=Pa(a);q=a;e.selectValueMap[s]=a;e.addOption(a,b);b.attr("value",s);d&&f&&c()})}else g?f.$observe("value",function(a){e.readValue();var d,f=b.prop("selected");u(q)&&(e.removeOption(q),d=!0);q=
+a;e.addOption(a,b);d&&f&&c()}):h?a.$watch(h,function(a,d){f.$set("value",a);var g=b.prop("selected");d!==a&&e.removeOption(d);e.addOption(a,b);d&&g&&c()}):e.addOption(f.value,b);f.$observe("disabled",function(a){if("true"===a||a&&b.prop("selected"))e.multiple?c(!0):(e.ngModelCtrl.$setViewValue(null),e.ngModelCtrl.$render())});b.on("$destroy",function(){var a=e.readValue(),b=f.value;e.removeOption(b);d();(e.multiple&&a&&-1!==a.indexOf(b)||a===b)&&c(!0)})}}],Ie=function(){return{restrict:"E",require:["select",
+"?ngModel"],controller:jh,priority:1,link:{pre:function(a,b,d,c){var e=c[0],f=c[1];if(f){if(e.ngModelCtrl=f,b.on("change",function(){e.removeUnknownOption();a.$apply(function(){f.$setViewValue(e.readValue())})}),d.multiple){e.multiple=!0;e.readValue=function(){var a=[];q(b.find("option"),function(b){b.selected&&!b.disabled&&(b=b.value,a.push(b in e.selectValueMap?e.selectValueMap[b]:b))});return a};e.writeValue=function(a){q(b.find("option"),function(b){var c=!!a&&(-1!==Array.prototype.indexOf.call(a,
+b.value)||-1!==Array.prototype.indexOf.call(a,e.selectValueMap[b.value]));c!==b.selected&&Ta(B(b),c)})};var g,h=NaN;a.$watch(function(){h!==f.$viewValue||sa(g,f.$viewValue)||(g=pa(f.$viewValue),f.$render());h=f.$viewValue});f.$isEmpty=function(a){return!a||0===a.length}}}else e.registerOption=z},post:function(a,b,d,c){var e=c[1];if(e){var f=c[0];e.$render=function(){f.writeValue(e.$viewValue)}}}}}},Je=["$interpolate",function(a){return{restrict:"E",priority:100,compile:function(b,d){var c,e;u(d.ngValue)||
+(u(d.value)?c=a(d.value,!0):(e=a(b.text(),!0))||d.$set("value",b.text()));return function(a,b,d){var k=b.parent();(k=k.data("$selectController")||k.parent().data("$selectController"))&&k.registerOption(a,b,d,c,e)}}}}],Zc=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){c&&(d.required=!0,c.$validators.required=function(a,b){return!d.required||!c.$isEmpty(b)},d.$observe("required",function(){c.$validate()}))}}},Yc=function(){return{restrict:"A",require:"?ngModel",link:function(a,
+b,d,c){if(c){var e,f=d.ngPattern||d.pattern;d.$observe("pattern",function(a){F(a)&&0<a.length&&(a=new RegExp("^"+a+"$"));if(a&&!a.test)throw L("ngPattern")("noregexp",f,a,xa(b));e=a||void 0;c.$validate()});c.$validators.pattern=function(a,b){return c.$isEmpty(b)||w(e)||e.test(b)}}}}},ad=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){if(c){var e=-1;d.$observe("maxlength",function(a){a=Z(a);e=da(a)?-1:a;c.$validate()});c.$validators.maxlength=function(a,b){return 0>e||c.$isEmpty(b)||
+b.length<=e}}}}},$c=function(){return{restrict:"A",require:"?ngModel",link:function(a,b,d,c){if(c){var e=0;d.$observe("minlength",function(a){e=Z(a)||0;c.$validate()});c.$validators.minlength=function(a,b){return c.$isEmpty(b)||b.length>=e}}}}};x.angular.bootstrap?x.console&&console.log("WARNING: Tried to load angular more than once."):(ze(),Ce(ea),ea.module("ngLocale",[],["$provide",function(a){function b(a){a+="";var b=a.indexOf(".");return-1==b?0:a.length-b-1}a.value("$locale",{DATETIME_FORMATS:{AMPMS:["AM",
+"PM"],DAY:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),ERANAMES:["Before Christ","Anno Domini"],ERAS:["BC","AD"],FIRSTDAYOFWEEK:6,MONTH:"January February March April May June July August September October November December".split(" "),SHORTDAY:"Sun Mon Tue Wed Thu Fri Sat".split(" "),SHORTMONTH:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),STANDALONEMONTH:"January February March April May June July August September October November December".split(" "),WEEKENDRANGE:[5,
+6],fullDate:"EEEE, MMMM d, y",longDate:"MMMM d, y",medium:"MMM d, y h:mm:ss a",mediumDate:"MMM d, y",mediumTime:"h:mm:ss a","short":"M/d/yy h:mm a",shortDate:"M/d/yy",shortTime:"h:mm a"},NUMBER_FORMATS:{CURRENCY_SYM:"$",DECIMAL_SEP:".",GROUP_SEP:",",PATTERNS:[{gSize:3,lgSize:3,maxFrac:3,minFrac:0,minInt:1,negPre:"-",negSuf:"",posPre:"",posSuf:""},{gSize:3,lgSize:3,maxFrac:2,minFrac:2,minInt:1,negPre:"-\u00a4",negSuf:"",posPre:"\u00a4",posSuf:""}]},id:"en-us",localeID:"en_US",pluralCat:function(a,
+c){var e=a|0,f=c;void 0===f&&(f=Math.min(b(a),3));Math.pow(10,f);return 1==e&&0==f?"one":"other"}})}]),B(function(){ue(x.document,Sc)}))})(window);!window.angular.$$csp().noInlineStyle&&window.angular.element(document.head).prepend('<style type="text/css">@charset "UTF-8";[ng\\:cloak],[ng-cloak],[data-ng-cloak],[x-ng-cloak],.ng-cloak,.x-ng-cloak,.ng-hide:not(.ng-hide-animate){display:none !important;}ng\\:form{display:block;}.ng-animate-shim{visibility:hidden;}.ng-anchor{position:absolute;}</style>');
+//# sourceMappingURL=angular.min.js.map
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js
new file mode 100644
index 0000000..4aced57
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js
@@ -0,0 +1 @@
+!function(window){"use strict";var nv=window.nv;"undefined"!=typeof exports&&(nv=require("nvd3")),angular.module("nvd3",[]).directive("nvd3",["nvd3Utils",function(nvd3Utils){return{restrict:"AE",scope:{data:"=",options:"=",api:"=?",events:"=?",config:"=?",onReady:"&?"},link:function(scope,element,attrs){function configure(chart,options,chartType){chart&&options&&angular.forEach(chart,function(value,key){"_"===key[0]||("dispatch"===key?(void 0!==options[key]&&null!==options[key]||scope._config.extended&&(options[key]={}),configureEvents(value,options[key])):"tooltip"===key?(void 0!==options[key]&&null!==options[key]||scope._config.extended&&(options[key]={}),configure(chart[key],options[key],chartType)):"contentGenerator"===key?options[key]&&chart[key](options[key]):-1===["axis","clearHighlights","defined","highlightPoint","nvPointerEventsClass","options","rangeBand","rangeBands","scatter","open","close","node"].indexOf(key)&&(void 0===options[key]||null===options[key]?scope._config.extended&&(options[key]=value()):chart[key](options[key])))})}function configureEvents(dispatch,options){dispatch&&options&&angular.forEach(dispatch,function(value,key){void 0===options[key]||null===options[key]?scope._config.extended&&(options[key]=value.on):dispatch.on(key+"._",options[key])})}function configureWrapper(name){var _=nvd3Utils.deepExtend(defaultWrapper(name),scope.options[name]||{});scope._config.extended&&(scope.options[name]=_);var wrapElement=angular.element("<div></div>").html(_.html||"").addClass(name).addClass(_.className).removeAttr("style").css(_.css);_.html||wrapElement.text(_.text),_.enable&&("title"===name?element.prepend(wrapElement):"subtitle"===name?angular.element(element[0].querySelector(".title")).after(wrapElement):"caption"===name&&element.append(wrapElement))}function configureStyles(){var _=nvd3Utils.deepExtend(defaultStyles(),scope.options.styles||{});scope._config.extended&&(scope.options.styles=_),angular.forEach(_.classes,function(value,key){value?element.addClass(key):element.removeClass(key)}),element.removeAttr("style").css(_.css)}function defaultWrapper(_){switch(_){case"title":return{enable:!1,text:"Write Your Title",className:"h4",css:{width:scope.options.chart.width+"px",textAlign:"center"}};case"subtitle":return{enable:!1,text:"Write Your Subtitle",css:{width:scope.options.chart.width+"px",textAlign:"center"}};case"caption":return{enable:!1,text:"Figure 1. 
Write Your Caption text.",css:{width:scope.options.chart.width+"px",textAlign:"center"}}}}function defaultStyles(){return{classes:{"with-3d-shadow":!0,"with-transitions":!0,gallery:!1},css:{}}}function dataWatchFn(newData,oldData){newData!==oldData&&(scope._config.disabled||(scope._config.refreshDataOnly?scope.api.update():scope.api.refresh()))}var defaultConfig={extended:!1,visible:!0,disabled:!1,refreshDataOnly:!0,deepWatchOptions:!0,deepWatchData:!0,deepWatchDataDepth:2,debounce:10,debounceImmediate:!0};scope.isReady=!1,scope._config=angular.extend(defaultConfig,scope.config),scope.api={refresh:function(){scope.api.updateWithOptions(),scope.isReady=!0},refreshWithTimeout:function(t){setTimeout(function(){scope.api.refresh()},t)},update:function(){scope.chart&&scope.svg?"sunburstChart"===scope.options.chart.type?scope.svg.datum(angular.copy(scope.data)).call(scope.chart):scope.svg.datum(scope.data).call(scope.chart):scope.api.refresh()},updateWithTimeout:function(t){setTimeout(function(){scope.api.update()},t)},updateWithOptions:function(options){if(arguments.length){if(scope.options=options,scope._config.deepWatchOptions&&!scope._config.disabled)return}else options=scope.options;scope.api.clearElement(),angular.isDefined(options)!==!1&&scope._config.visible&&(scope.chart=nv.models[options.chart.type](),scope.chart.id=Math.random().toString(36).substr(2,15),angular.forEach(scope.chart,function(value,key){"_"===key[0]||["clearHighlights","highlightPoint","id","options","resizeHandler","state","open","close","tooltipContent"].indexOf(key)>=0||("dispatch"===key?(void 0!==options.chart[key]&&null!==options.chart[key]||scope._config.extended&&(options.chart[key]={}),configureEvents(scope.chart[key],options.chart[key])):["bars","bars1","bars2","boxplot","bullet","controls","discretebar","distX","distY","focus","interactiveLayer","legend","lines","lines1","lines2","multibar","pie","scatter","scatters1","scatters2","sparkline","stack1","stack2","sunburst","tooltip","x2Axis","xAxis","y1Axis","y2Axis","y3Axis","y4Axis","yAxis","yAxis1","yAxis2"].indexOf(key)>=0||"stacked"===key&&"stackedAreaChart"===options.chart.type?(void 0!==options.chart[key]&&null!==options.chart[key]||scope._config.extended&&(options.chart[key]={}),configure(scope.chart[key],options.chart[key],options.chart.type)):"focusHeight"===key&&"lineChart"===options.chart.type||"focusHeight"===key&&"lineWithFocusChart"===options.chart.type||("xTickFormat"!==key&&"yTickFormat"!==key||"lineWithFocusChart"!==options.chart.type)&&("tooltips"===key&&"boxPlotChart"===options.chart.type||("tooltipXContent"!==key&&"tooltipYContent"!==key||"scatterChart"!==options.chart.type)&&("x"!==key&&"y"!==key||"forceDirectedGraph"!==options.chart.type)&&(void 0===options.chart[key]||null===options.chart[key]?scope._config.extended&&("barColor"===key?options.chart[key]=value()():options.chart[key]=value()):scope.chart[key](options.chart[key]))))}),scope.api.updateWithData(),(options.title||scope._config.extended)&&configureWrapper("title"),(options.subtitle||scope._config.extended)&&configureWrapper("subtitle"),(options.caption||scope._config.extended)&&configureWrapper("caption"),(options.styles||scope._config.extended)&&configureStyles(),nv.addGraph(function(){return scope.chart?(scope.chart.resizeHandler&&scope.chart.resizeHandler.clear(),scope.chart.resizeHandler=nv.utils.windowResize(function(){scope.chart&&scope.chart.update&&scope.chart.update()}),void 
0!==options.chart.zoom&&["scatterChart","lineChart","candlestickBarChart","cumulativeLineChart","historicalBarChart","ohlcBarChart","stackedAreaChart"].indexOf(options.chart.type)>-1&&nvd3Utils.zoom(scope,options),scope.chart):void 0},options.chart.callback))},updateWithData:function(data){if(arguments.length){if(scope.data=data,scope._config.deepWatchData&&!scope._config.disabled)return}else data="sunburstChart"===scope.options.chart.type?angular.copy(scope.data):scope.data;if(data){d3.select(element[0]).select("svg").remove();var h,w;scope.svg=d3.select(element[0]).insert("svg",".caption"),(h=scope.options.chart.height)&&(isNaN(+h)||(h+="px"),scope.svg.attr("height",h).style({height:h})),(w=scope.options.chart.width)?(isNaN(+w)||(w+="px"),scope.svg.attr("width",w).style({width:w})):scope.svg.attr("width","100%").style({width:"100%"}),scope.svg.datum(data).call(scope.chart),scope.chart&&scope.chart.zoomRender&&scope.chart.zoomRender()}},clearElement:function(){if(element.find(".title").remove(),element.find(".subtitle").remove(),element.find(".caption").remove(),element.empty(),scope.chart&&scope.chart.tooltip&&scope.chart.tooltip.id&&d3.select("#"+scope.chart.tooltip.id()).remove(),nv.graphs&&scope.chart)for(var i=nv.graphs.length-1;i>=0;i--)nv.graphs[i]&&nv.graphs[i].id===scope.chart.id&&nv.graphs.splice(i,1);nv.tooltip&&nv.tooltip.cleanup&&nv.tooltip.cleanup(),scope.chart&&scope.chart.resizeHandler&&scope.chart.resizeHandler.clear(),scope.chart=null},getScope:function(){return scope},getElement:function(){return element}},scope._config.deepWatchOptions&&scope.$watch("options",nvd3Utils.debounce(function(newOptions){scope._config.disabled||scope.api.refresh()},scope._config.debounce,scope._config.debounceImmediate),!0),scope._config.deepWatchData&&(1===scope._config.deepWatchDataDepth?scope.$watchCollection("data",dataWatchFn):scope.$watch("data",dataWatchFn,2===scope._config.deepWatchDataDepth)),scope.$watch("config",function(newConfig,oldConfig){newConfig!==oldConfig&&(scope._config=angular.extend(defaultConfig,newConfig),scope.api.refresh())},!0),scope._config.deepWatchOptions||scope._config.deepWatchData||scope.api.refresh(),angular.forEach(scope.events,function(eventHandler,event){scope.$on(event,function(e,args){return eventHandler(e,scope,args)})}),element.on("$destroy",function(){scope.api.clearElement()}),scope.$watch("isReady",function(isReady){isReady&&scope.onReady&&"function"==typeof scope.onReady()&&scope.onReady()(scope,element)})}}}]).factory("nvd3Utils",function(){return{debounce:function(func,wait,immediate){var timeout;return function(){var context=this,args=arguments,later=function(){timeout=null,immediate||func.apply(context,args)},callNow=immediate&&!timeout;clearTimeout(timeout),timeout=setTimeout(later,wait),callNow&&func.apply(context,args)}},deepExtend:function(dst){var me=this;return angular.forEach(arguments,function(obj){obj!==dst&&angular.forEach(obj,function(value,key){dst[key]&&dst[key].constructor&&dst[key].constructor===Object?me.deepExtend(dst[key],value):dst[key]=value})}),dst},zoom:function(scope,options){var zoom=options.chart.zoom,enabled="undefined"==typeof zoom.enabled||null===zoom.enabled?!0:zoom.enabled;if(enabled){var 
fixDomain,d3zoom,zoomed,unzoomed,zoomend,xScale=scope.chart.xAxis.scale(),yScale=scope.chart.yAxis.scale(),xDomain=scope.chart.xDomain||xScale.domain,yDomain=scope.chart.yDomain||yScale.domain,x_boundary=xScale.domain().slice(),y_boundary=yScale.domain().slice(),scale=zoom.scale||1,translate=zoom.translate||[0,0],scaleExtent=zoom.scaleExtent||[1,10],useFixedDomain=zoom.useFixedDomain||!1,useNiceScale=zoom.useNiceScale||!1,horizontalOff=zoom.horizontalOff||!1,verticalOff=zoom.verticalOff||!1,unzoomEventType=zoom.unzoomEventType||"dblclick.zoom";useNiceScale&&(xScale.nice(),yScale.nice()),fixDomain=function(domain,boundary){return domain[0]=Math.min(Math.max(domain[0],boundary[0]),boundary[1]-boundary[1]/scaleExtent[1]),domain[1]=Math.max(boundary[0]+boundary[1]/scaleExtent[1],Math.min(domain[1],boundary[1])),domain},zoomed=function(){if(void 0!==zoom.zoomed){var domains=zoom.zoomed(xScale.domain(),yScale.domain());horizontalOff||xDomain([domains.x1,domains.x2]),verticalOff||yDomain([domains.y1,domains.y2])}else horizontalOff||xDomain(useFixedDomain?fixDomain(xScale.domain(),x_boundary):xScale.domain()),verticalOff||yDomain(useFixedDomain?fixDomain(yScale.domain(),y_boundary):yScale.domain());scope.chart&&scope.chart.update()},unzoomed=function(){if(void 0!==zoom.unzoomed){var domains=zoom.unzoomed(xScale.domain(),yScale.domain());horizontalOff||xDomain([domains.x1,domains.x2]),verticalOff||yDomain([domains.y1,domains.y2])}else horizontalOff||xDomain(x_boundary),verticalOff||yDomain(y_boundary);d3zoom.scale(scale).translate(translate),scope.chart&&scope.chart.update()},zoomend=function(){void 0!==zoom.zoomend&&zoom.zoomend()},d3zoom=d3.behavior.zoom().x(xScale).y(yScale).scaleExtent(scaleExtent).on("zoom",zoomed).on("zoomend",zoomend),scope.svg&&(scope.svg.call(d3zoom),d3zoom.scale(scale).translate(translate).event(scope.svg),"none"!==unzoomEventType&&scope.svg.on(unzoomEventType,unzoomed)),scope.chart&&(scope.chart.zoomRender=function(){d3zoom.scale(scale).translate(translate),xScale=scope.chart.xAxis.scale(),yScale=scope.chart.yAxis.scale(),xDomain=scope.chart.xDomain||xScale.domain,yDomain=scope.chart.yDomain||yScale.domain,x_boundary=xScale.domain().slice(),y_boundary=yScale.domain().slice(),d3zoom.x(xScale).y(yScale),scope.svg.call(d3zoom),"none"!==unzoomEventType&&scope.svg.on(unzoomEventType,unzoomed)})}}}})}(window);
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js
new file mode 100644
index 0000000..3f985d1
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/angular-route-1.6.4.min.js
@@ -0,0 +1,17 @@
+/*
+ AngularJS v1.6.4
+ (c) 2010-2017 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(J,d){'use strict';function A(d){k&&d.get("$route")}function B(t,u,g){return{restrict:"ECA",terminal:!0,priority:400,transclude:"element",link:function(a,f,b,c,m){function v(){l&&(g.cancel(l),l=null);n&&(n.$destroy(),n=null);p&&(l=g.leave(p),l.done(function(a){!1!==a&&(l=null)}),p=null)}function E(){var b=t.current&&t.current.locals;if(d.isDefined(b&&b.$template)){var b=a.$new(),c=t.current;p=m(b,function(b){g.enter(b,null,p||f).done(function(b){!1===b||!d.isDefined(w)||w&&!a.$eval(w)||u()});
+v()});n=c.scope=b;n.$emit("$viewContentLoaded");n.$eval(k)}else v()}var n,p,l,w=b.autoscroll,k=b.onload||"";a.$on("$routeChangeSuccess",E);E()}}}function C(d,k,g){return{restrict:"ECA",priority:-400,link:function(a,f){var b=g.current,c=b.locals;f.html(c.$template);var m=d(f.contents());if(b.controller){c.$scope=a;var v=k(b.controller,c);b.controllerAs&&(a[b.controllerAs]=v);f.data("$ngControllerController",v);f.children().data("$ngControllerController",v)}a[b.resolveAs||"$resolve"]=c;m(a)}}}var x,
+y,F,G,z=d.module("ngRoute",[]).info({angularVersion:"1.6.4"}).provider("$route",function(){function t(a,f){return d.extend(Object.create(a),f)}function u(a,d){var b=d.caseInsensitiveMatch,c={originalPath:a,regexp:a},g=c.keys=[];a=a.replace(/([().])/g,"\\$1").replace(/(\/)?:(\w+)(\*\?|[?*])?/g,function(a,b,d,c){a="?"===c||"*?"===c?"?":null;c="*"===c||"*?"===c?"*":null;g.push({name:d,optional:!!a});b=b||"";return""+(a?"":b)+"(?:"+(a?b:"")+(c&&"(.+?)"||"([^/]+)")+(a||"")+")"+(a||"")}).replace(/([/$*])/g,
+"\\$1");c.regexp=new RegExp("^"+a+"$",b?"i":"");return c}x=d.isArray;y=d.isObject;F=d.isDefined;G=d.noop;var g={};this.when=function(a,f){var b;b=void 0;if(x(f)){b=b||[];for(var c=0,m=f.length;c<m;c++)b[c]=f[c]}else if(y(f))for(c in b=b||{},f)if("$"!==c.charAt(0)||"$"!==c.charAt(1))b[c]=f[c];b=b||f;d.isUndefined(b.reloadOnSearch)&&(b.reloadOnSearch=!0);d.isUndefined(b.caseInsensitiveMatch)&&(b.caseInsensitiveMatch=this.caseInsensitiveMatch);g[a]=d.extend(b,a&&u(a,b));a&&(c="/"===a[a.length-1]?a.substr(0,
+a.length-1):a+"/",g[c]=d.extend({redirectTo:a},u(c,b)));return this};this.caseInsensitiveMatch=!1;this.otherwise=function(a){"string"===typeof a&&(a={redirectTo:a});this.when(null,a);return this};k=!0;this.eagerInstantiationEnabled=function(a){return F(a)?(k=a,this):k};this.$get=["$rootScope","$location","$routeParams","$q","$injector","$templateRequest","$sce","$browser",function(a,f,b,c,m,k,u,n){function p(e){var h=q.current;(y=(s=C())&&h&&s.$$route===h.$$route&&d.equals(s.pathParams,h.pathParams)&&
+!s.reloadOnSearch&&!D)||!h&&!s||a.$broadcast("$routeChangeStart",s,h).defaultPrevented&&e&&e.preventDefault()}function l(){var e=q.current,h=s;if(y)e.params=h.params,d.copy(e.params,b),a.$broadcast("$routeUpdate",e);else if(h||e){D=!1;q.current=h;var H=c.resolve(h);n.$$incOutstandingRequestCount();H.then(w).then(z).then(function(c){return c&&H.then(A).then(function(c){h===q.current&&(h&&(h.locals=c,d.copy(h.params,b)),a.$broadcast("$routeChangeSuccess",h,e))})}).catch(function(b){h===q.current&&a.$broadcast("$routeChangeError",
+h,e,b)}).finally(function(){n.$$completeOutstandingRequest(G)})}}function w(e){var a={route:e,hasRedirection:!1};if(e)if(e.redirectTo)if(d.isString(e.redirectTo))a.path=x(e.redirectTo,e.params),a.search=e.params,a.hasRedirection=!0;else{var b=f.path(),g=f.search();e=e.redirectTo(e.pathParams,b,g);d.isDefined(e)&&(a.url=e,a.hasRedirection=!0)}else if(e.resolveRedirectTo)return c.resolve(m.invoke(e.resolveRedirectTo)).then(function(e){d.isDefined(e)&&(a.url=e,a.hasRedirection=!0);return a});return a}
+function z(a){var b=!0;if(a.route!==q.current)b=!1;else if(a.hasRedirection){var d=f.url(),c=a.url;c?f.url(c).replace():c=f.path(a.path).search(a.search).replace().url();c!==d&&(b=!1)}return b}function A(a){if(a){var b=d.extend({},a.resolve);d.forEach(b,function(a,e){b[e]=d.isString(a)?m.get(a):m.invoke(a,null,null,e)});a=B(a);d.isDefined(a)&&(b.$template=a);return c.all(b)}}function B(a){var b,c;d.isDefined(b=a.template)?d.isFunction(b)&&(b=b(a.params)):d.isDefined(c=a.templateUrl)&&(d.isFunction(c)&&
+(c=c(a.params)),d.isDefined(c)&&(a.loadedTemplateUrl=u.valueOf(c),b=k(c)));return b}function C(){var a,b;d.forEach(g,function(c,g){var r;if(r=!b){var k=f.path();r=c.keys;var m={};if(c.regexp)if(k=c.regexp.exec(k)){for(var l=1,n=k.length;l<n;++l){var p=r[l-1],q=k[l];p&&q&&(m[p.name]=q)}r=m}else r=null;else r=null;r=a=r}r&&(b=t(c,{params:d.extend({},f.search(),a),pathParams:a}),b.$$route=c)});return b||g[null]&&t(g[null],{params:{},pathParams:{}})}function x(a,b){var c=[];d.forEach((a||"").split(":"),
+function(a,d){if(0===d)c.push(a);else{var e=a.match(/(\w+)(?:[?*])?(.*)/),f=e[1];c.push(b[f]);c.push(e[2]||"");delete b[f]}});return c.join("")}var D=!1,s,y,q={routes:g,reload:function(){D=!0;var b={defaultPrevented:!1,preventDefault:function(){this.defaultPrevented=!0;D=!1}};a.$evalAsync(function(){p(b);b.defaultPrevented||l()})},updateParams:function(a){if(this.current&&this.current.$$route)a=d.extend({},this.current.params,a),f.path(x(this.current.$$route.originalPath,a)),f.search(a);else throw I("norout");
+}};a.$on("$locationChangeStart",p);a.$on("$locationChangeSuccess",l);return q}]}).run(A),I=d.$$minErr("ngRoute"),k;A.$inject=["$injector"];z.provider("$routeParams",function(){this.$get=function(){return{}}});z.directive("ngView",B);z.directive("ngView",C);B.$inject=["$route","$anchorScroll","$animate"];C.$inject=["$compile","$controller","$route"]})(window,window.angular);
+//# sourceMappingURL=angular-route.min.js.map
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/d3-3.5.17.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/d3-3.5.17.min.js
new file mode 100644
index 0000000..1664873
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/d3-3.5.17.min.js
@@ -0,0 +1,5 @@
+!function(){function n(n){return n&&(n.ownerDocument||n.document||n).documentElement}function t(n){return n&&(n.ownerDocument&&n.ownerDocument.defaultView||n.document&&n||n.defaultView)}function e(n,t){return t>n?-1:n>t?1:n>=t?0:NaN}function r(n){return null===n?NaN:+n}function i(n){return!isNaN(n)}function u(n){return{left:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)<0?r=u+1:i=u}return r},right:function(t,e,r,i){for(arguments.length<3&&(r=0),arguments.length<4&&(i=t.length);i>r;){var u=r+i>>>1;n(t[u],e)>0?i=u:r=u+1}return r}}}function o(n){return n.length}function a(n){for(var t=1;n*t%1;)t*=10;return t}function l(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function c(){this._=Object.create(null)}function f(n){return(n+="")===bo||n[0]===_o?_o+n:n}function s(n){return(n+="")[0]===_o?n.slice(1):n}function h(n){return f(n)in this._}function p(n){return(n=f(n))in this._&&delete this._[n]}function g(){var n=[];for(var t in this._)n.push(s(t));return n}function v(){var n=0;for(var t in this._)++n;return n}function d(){for(var n in this._)return!1;return!0}function y(){this._=Object.create(null)}function m(n){return n}function M(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function x(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=wo.length;r>e;++e){var i=wo[e]+t;if(i in n)return i}}function b(){}function _(){}function w(n){function t(){for(var t,r=e,i=-1,u=r.length;++i<u;)(t=r[i].on)&&t.apply(this,arguments);return n}var e=[],r=new c;return t.on=function(t,i){var u,o=r.get(t);return arguments.length<2?o&&o.on:(o&&(o.on=null,e=e.slice(0,u=e.indexOf(o)).concat(e.slice(u+1)),r.remove(t)),i&&e.push(r.set(t,{on:i})),n)},t}function S(){ao.event.preventDefault()}function k(){for(var n,t=ao.event;n=t.sourceEvent;)t=n;return t}function N(n){for(var t=new _,e=0,r=arguments.length;++e<r;)t[arguments[e]]=w(t);return t.of=function(e,r){return function(i){try{var u=i.sourceEvent=ao.event;i.target=n,ao.event=i,t[i.type].apply(e,r)}finally{ao.event=u}}},t}function E(n){return ko(n,Co),n}function A(n){return"function"==typeof n?n:function(){return No(n,this)}}function C(n){return"function"==typeof n?n:function(){return Eo(n,this)}}function z(n,t){function e(){this.removeAttribute(n)}function r(){this.removeAttributeNS(n.space,n.local)}function i(){this.setAttribute(n,t)}function u(){this.setAttributeNS(n.space,n.local,t)}function o(){var e=t.apply(this,arguments);null==e?this.removeAttribute(n):this.setAttribute(n,e)}function a(){var e=t.apply(this,arguments);null==e?this.removeAttributeNS(n.space,n.local):this.setAttributeNS(n.space,n.local,e)}return n=ao.ns.qualify(n),null==t?n.local?r:e:"function"==typeof t?n.local?a:o:n.local?u:i}function L(n){return n.trim().replace(/\s+/g," ")}function q(n){return new RegExp("(?:^|\\s+)"+ao.requote(n)+"(?:\\s+|$)","g")}function T(n){return(n+"").trim().split(/^|\s+/)}function R(n,t){function e(){for(var e=-1;++e<i;)n[e](this,t)}function r(){for(var e=-1,r=t.apply(this,arguments);++e<i;)n[e](this,r)}n=T(n).map(D);var i=n.length;return"function"==typeof t?r:e}function D(n){var t=q(n);return function(e,r){if(i=e.classList)return r?i.add(n):i.remove(n);var i=e.getAttribute("class")||"";r?(t.lastIndex=0,t.test(i)||e.setAttribute("class",L(i+" "+n))):e.setAttribute("class",L(i.replace(t," ")))}}function P(n,t,e){function r(){this.style.removeProperty(n)}function i(){this.style.setProperty(n,t,e)}function u(){var 
r=t.apply(this,arguments);null==r?this.style.removeProperty(n):this.style.setProperty(n,r,e)}return null==t?r:"function"==typeof t?u:i}function U(n,t){function e(){delete this[n]}function r(){this[n]=t}function i(){var e=t.apply(this,arguments);null==e?delete this[n]:this[n]=e}return null==t?e:"function"==typeof t?i:r}function j(n){function t(){var t=this.ownerDocument,e=this.namespaceURI;return e===zo&&t.documentElement.namespaceURI===zo?t.createElement(n):t.createElementNS(e,n)}function e(){return this.ownerDocument.createElementNS(n.space,n.local)}return"function"==typeof n?n:(n=ao.ns.qualify(n)).local?e:t}function F(){var n=this.parentNode;n&&n.removeChild(this)}function H(n){return{__data__:n}}function O(n){return function(){return Ao(this,n)}}function I(n){return arguments.length||(n=e),function(t,e){return t&&e?n(t.__data__,e.__data__):!t-!e}}function Y(n,t){for(var e=0,r=n.length;r>e;e++)for(var i,u=n[e],o=0,a=u.length;a>o;o++)(i=u[o])&&t(i,o,e);return n}function Z(n){return ko(n,qo),n}function V(n){var t,e;return function(r,i,u){var o,a=n[u].update,l=a.length;for(u!=e&&(e=u,t=0),i>=t&&(t=i+1);!(o=a[t])&&++t<l;);return o}}function X(n,t,e){function r(){var t=this[o];t&&(this.removeEventListener(n,t,t.$),delete this[o])}function i(){var i=l(t,co(arguments));r.call(this),this.addEventListener(n,this[o]=i,i.$=e),i._=t}function u(){var t,e=new RegExp("^__on([^.]+)"+ao.requote(n)+"$");for(var r in this)if(t=r.match(e)){var i=this[r];this.removeEventListener(t[1],i,i.$),delete this[r]}}var o="__on"+n,a=n.indexOf("."),l=$;a>0&&(n=n.slice(0,a));var c=To.get(n);return c&&(n=c,l=B),a?t?i:r:t?b:u}function $(n,t){return function(e){var r=ao.event;ao.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{ao.event=r}}}function B(n,t){var e=$(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function W(e){var r=".dragsuppress-"+ ++Do,i="click"+r,u=ao.select(t(e)).on("touchmove"+r,S).on("dragstart"+r,S).on("selectstart"+r,S);if(null==Ro&&(Ro="onselectstart"in e?!1:x(e.style,"userSelect")),Ro){var o=n(e).style,a=o[Ro];o[Ro]="none"}return function(n){if(u.on(r,null),Ro&&(o[Ro]=a),n){var t=function(){u.on(i,null)};u.on(i,function(){S(),t()},!0),setTimeout(t,0)}}}function J(n,e){e.changedTouches&&(e=e.changedTouches[0]);var r=n.ownerSVGElement||n;if(r.createSVGPoint){var i=r.createSVGPoint();if(0>Po){var u=t(n);if(u.scrollX||u.scrollY){r=ao.select("body").append("svg").style({position:"absolute",top:0,left:0,margin:0,padding:0,border:"none"},"important");var o=r[0][0].getScreenCTM();Po=!(o.f||o.e),r.remove()}}return Po?(i.x=e.pageX,i.y=e.pageY):(i.x=e.clientX,i.y=e.clientY),i=i.matrixTransform(n.getScreenCTM().inverse()),[i.x,i.y]}var a=n.getBoundingClientRect();return[e.clientX-a.left-n.clientLeft,e.clientY-a.top-n.clientTop]}function G(){return ao.event.changedTouches[0].identifier}function K(n){return n>0?1:0>n?-1:0}function Q(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function nn(n){return n>1?0:-1>n?Fo:Math.acos(n)}function tn(n){return n>1?Io:-1>n?-Io:Math.asin(n)}function en(n){return((n=Math.exp(n))-1/n)/2}function rn(n){return((n=Math.exp(n))+1/n)/2}function un(n){return((n=Math.exp(2*n))-1)/(n+1)}function on(n){return(n=Math.sin(n/2))*n}function an(){}function ln(n,t,e){return this instanceof ln?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof ln?new ln(n.h,n.s,n.l):_n(""+n,wn,ln):new ln(n,t,e)}function cn(n,t,e){function r(n){return 
n>360?n-=360:0>n&&(n+=360),60>n?u+(o-u)*n/60:180>n?o:240>n?u+(o-u)*(240-n)/60:u}function i(n){return Math.round(255*r(n))}var u,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,u=2*e-o,new mn(i(n+120),i(n),i(n-120))}function fn(n,t,e){return this instanceof fn?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof fn?new fn(n.h,n.c,n.l):n instanceof hn?gn(n.l,n.a,n.b):gn((n=Sn((n=ao.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new fn(n,t,e)}function sn(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new hn(e,Math.cos(n*=Yo)*t,Math.sin(n)*t)}function hn(n,t,e){return this instanceof hn?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof hn?new hn(n.l,n.a,n.b):n instanceof fn?sn(n.h,n.c,n.l):Sn((n=mn(n)).r,n.g,n.b):new hn(n,t,e)}function pn(n,t,e){var r=(n+16)/116,i=r+t/500,u=r-e/200;return i=vn(i)*na,r=vn(r)*ta,u=vn(u)*ea,new mn(yn(3.2404542*i-1.5371385*r-.4985314*u),yn(-.969266*i+1.8760108*r+.041556*u),yn(.0556434*i-.2040259*r+1.0572252*u))}function gn(n,t,e){return n>0?new fn(Math.atan2(e,t)*Zo,Math.sqrt(t*t+e*e),n):new fn(NaN,NaN,n)}function vn(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function dn(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function yn(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function mn(n,t,e){return this instanceof mn?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof mn?new mn(n.r,n.g,n.b):_n(""+n,mn,cn):new mn(n,t,e)}function Mn(n){return new mn(n>>16,n>>8&255,255&n)}function xn(n){return Mn(n)+""}function bn(n){return 16>n?"0"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function _n(n,t,e){var r,i,u,o=0,a=0,l=0;if(r=/([a-z]+)\((.*)\)/.exec(n=n.toLowerCase()))switch(i=r[2].split(","),r[1]){case"hsl":return e(parseFloat(i[0]),parseFloat(i[1])/100,parseFloat(i[2])/100);case"rgb":return t(Nn(i[0]),Nn(i[1]),Nn(i[2]))}return(u=ua.get(n))?t(u.r,u.g,u.b):(null==n||"#"!==n.charAt(0)||isNaN(u=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&u)>>4,o=o>>4|o,a=240&u,a=a>>4|a,l=15&u,l=l<<4|l):7===n.length&&(o=(16711680&u)>>16,a=(65280&u)>>8,l=255&u)),t(o,a,l))}function wn(n,t,e){var r,i,u=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-u,l=(o+u)/2;return a?(i=.5>l?a/(o+u):a/(2-o-u),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=NaN,i=l>0&&1>l?0:r),new ln(r,i,l)}function Sn(n,t,e){n=kn(n),t=kn(t),e=kn(e);var r=dn((.4124564*n+.3575761*t+.1804375*e)/na),i=dn((.2126729*n+.7151522*t+.072175*e)/ta),u=dn((.0193339*n+.119192*t+.9503041*e)/ea);return hn(116*i-16,500*(r-i),200*(i-u))}function kn(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function Nn(n){var t=parseFloat(n);return"%"===n.charAt(n.length-1)?Math.round(2.55*t):t}function En(n){return"function"==typeof n?n:function(){return n}}function An(n){return function(t,e,r){return 2===arguments.length&&"function"==typeof e&&(r=e,e=null),Cn(t,e,n,r)}}function Cn(n,t,e,r){function i(){var n,t=l.status;if(!t&&Ln(l)||t>=200&&300>t||304===t){try{n=e.call(u,l)}catch(r){return void o.error.call(u,r)}o.load.call(u,n)}else o.error.call(u,l)}var u={},o=ao.dispatch("beforesend","progress","load","error"),a={},l=new XMLHttpRequest,c=null;return!this.XDomainRequest||"withCredentials"in l||!/^(http(s)?:)?\/\//.test(n)||(l=new XDomainRequest),"onload"in l?l.onload=l.onerror=i:l.onreadystatechange=function(){l.readyState>3&&i()},l.onprogress=function(n){var t=ao.event;ao.event=n;try{o.progress.call(u,l)}finally{ao.event=t}},u.header=function(n,t){return 
n=(n+"").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+"",u)},u.mimeType=function(n){return arguments.length?(t=null==n?null:n+"",u):t},u.responseType=function(n){return arguments.length?(c=n,u):c},u.response=function(n){return e=n,u},["get","post"].forEach(function(n){u[n]=function(){return u.send.apply(u,[n].concat(co(arguments)))}}),u.send=function(e,r,i){if(2===arguments.length&&"function"==typeof r&&(i=r,r=null),l.open(e,n,!0),null==t||"accept"in a||(a.accept=t+",*/*"),l.setRequestHeader)for(var f in a)l.setRequestHeader(f,a[f]);return null!=t&&l.overrideMimeType&&l.overrideMimeType(t),null!=c&&(l.responseType=c),null!=i&&u.on("error",i).on("load",function(n){i(null,n)}),o.beforesend.call(u,l),l.send(null==r?null:r),u},u.abort=function(){return l.abort(),u},ao.rebind(u,o,"on"),null==r?u:u.get(zn(r))}function zn(n){return 1===n.length?function(t,e){n(null==t?e:null)}:n}function Ln(n){var t=n.responseType;return t&&"text"!==t?n.response:n.responseText}function qn(n,t,e){var r=arguments.length;2>r&&(t=0),3>r&&(e=Date.now());var i=e+t,u={c:n,t:i,n:null};return aa?aa.n=u:oa=u,aa=u,la||(ca=clearTimeout(ca),la=1,fa(Tn)),u}function Tn(){var n=Rn(),t=Dn()-n;t>24?(isFinite(t)&&(clearTimeout(ca),ca=setTimeout(Tn,t)),la=0):(la=1,fa(Tn))}function Rn(){for(var n=Date.now(),t=oa;t;)n>=t.t&&t.c(n-t.t)&&(t.c=null),t=t.n;return n}function Dn(){for(var n,t=oa,e=1/0;t;)t.c?(t.t<e&&(e=t.t),t=(n=t).n):t=n?n.n=t.n:oa=t.n;return aa=n,e}function Pn(n,t){return t-(n?Math.ceil(Math.log(n)/Math.LN10):1)}function Un(n,t){var e=Math.pow(10,3*xo(8-t));return{scale:t>8?function(n){return n/e}:function(n){return n*e},symbol:n}}function jn(n){var t=n.decimal,e=n.thousands,r=n.grouping,i=n.currency,u=r&&e?function(n,t){for(var i=n.length,u=[],o=0,a=r[0],l=0;i>0&&a>0&&(l+a+1>t&&(a=Math.max(1,t-l)),u.push(n.substring(i-=a,i+a)),!((l+=a+1)>t));)a=r[o=(o+1)%r.length];return u.reverse().join(e)}:m;return function(n){var e=ha.exec(n),r=e[1]||" ",o=e[2]||">",a=e[3]||"-",l=e[4]||"",c=e[5],f=+e[6],s=e[7],h=e[8],p=e[9],g=1,v="",d="",y=!1,m=!0;switch(h&&(h=+h.substring(1)),(c||"0"===r&&"="===o)&&(c=r="0",o="="),p){case"n":s=!0,p="g";break;case"%":g=100,d="%",p="f";break;case"p":g=100,d="%",p="r";break;case"b":case"o":case"x":case"X":"#"===l&&(v="0"+p.toLowerCase());case"c":m=!1;case"d":y=!0,h=0;break;case"s":g=-1,p="r"}"$"===l&&(v=i[0],d=i[1]),"r"!=p||h||(p="g"),null!=h&&("g"==p?h=Math.max(1,Math.min(21,h)):"e"!=p&&"f"!=p||(h=Math.max(0,Math.min(20,h)))),p=pa.get(p)||Fn;var M=c&&s;return function(n){var e=d;if(y&&n%1)return"";var i=0>n||0===n&&0>1/n?(n=-n,"-"):"-"===a?"":a;if(0>g){var l=ao.formatPrefix(n,h);n=l.scale(n),e=l.symbol+d}else n*=g;n=p(n,h);var x,b,_=n.lastIndexOf(".");if(0>_){var w=m?n.lastIndexOf("e"):-1;0>w?(x=n,b=""):(x=n.substring(0,w),b=n.substring(w))}else x=n.substring(0,_),b=t+n.substring(_+1);!c&&s&&(x=u(x,1/0));var S=v.length+x.length+b.length+(M?0:i.length),k=f>S?new Array(S=f-S+1).join(r):"";return M&&(x=u(k+x,k.length?f-b.length:1/0)),i+=v,n=x+b,("<"===o?i+n+k:">"===o?k+i+n:"^"===o?k.substring(0,S>>=1)+i+n+k.substring(S):i+(M?n:k+n))+e}}}function Fn(n){return n+""}function Hn(){this._=new Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function On(n,t,e){function r(t){var e=n(t),r=u(e,1);return r-t>t-e?e:r}function i(e){return t(e=n(new va(e-1)),1),e}function u(n,e){return t(n=new va(+n),e),n}function o(n,r,u){var o=i(n),a=[];if(u>1)for(;r>o;)e(o)%u||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{va=Hn;var 
r=new Hn;return r._=n,o(r,t,e)}finally{va=Date}}n.floor=n,n.round=r,n.ceil=i,n.offset=u,n.range=o;var l=n.utc=In(n);return l.floor=l,l.round=In(r),l.ceil=In(i),l.offset=In(u),l.range=a,n}function In(n){return function(t,e){try{va=Hn;var r=new Hn;return r._=t,n(r,e)._}finally{va=Date}}}function Yn(n){function t(n){function t(t){for(var e,i,u,o=[],a=-1,l=0;++a<r;)37===n.charCodeAt(a)&&(o.push(n.slice(l,a)),null!=(i=ya[e=n.charAt(++a)])&&(e=n.charAt(++a)),(u=A[e])&&(e=u(t,null==i?"e"===e?" ":"0":i)),o.push(e),l=a+1);return o.push(n.slice(l,a)),o.join("")}var r=n.length;return t.parse=function(t){var r={y:1900,m:0,d:1,H:0,M:0,S:0,L:0,Z:null},i=e(r,n,t,0);if(i!=t.length)return null;"p"in r&&(r.H=r.H%12+12*r.p);var u=null!=r.Z&&va!==Hn,o=new(u?Hn:va);return"j"in r?o.setFullYear(r.y,0,r.j):"W"in r||"U"in r?("w"in r||(r.w="W"in r?1:0),o.setFullYear(r.y,0,1),o.setFullYear(r.y,0,"W"in r?(r.w+6)%7+7*r.W-(o.getDay()+5)%7:r.w+7*r.U-(o.getDay()+6)%7)):o.setFullYear(r.y,r.m,r.d),o.setHours(r.H+(r.Z/100|0),r.M+r.Z%100,r.S,r.L),u?o._:o},t.toString=function(){return n},t}function e(n,t,e,r){for(var i,u,o,a=0,l=t.length,c=e.length;l>a;){if(r>=c)return-1;if(i=t.charCodeAt(a++),37===i){if(o=t.charAt(a++),u=C[o in ya?t.charAt(a++):o],!u||(r=u(n,e,r))<0)return-1}else if(i!=e.charCodeAt(r++))return-1}return r}function r(n,t,e){_.lastIndex=0;var r=_.exec(t.slice(e));return r?(n.w=w.get(r[0].toLowerCase()),e+r[0].length):-1}function i(n,t,e){x.lastIndex=0;var r=x.exec(t.slice(e));return r?(n.w=b.get(r[0].toLowerCase()),e+r[0].length):-1}function u(n,t,e){N.lastIndex=0;var r=N.exec(t.slice(e));return r?(n.m=E.get(r[0].toLowerCase()),e+r[0].length):-1}function o(n,t,e){S.lastIndex=0;var r=S.exec(t.slice(e));return r?(n.m=k.get(r[0].toLowerCase()),e+r[0].length):-1}function a(n,t,r){return e(n,A.c.toString(),t,r)}function l(n,t,r){return e(n,A.x.toString(),t,r)}function c(n,t,r){return e(n,A.X.toString(),t,r)}function f(n,t,e){var r=M.get(t.slice(e,e+=2).toLowerCase());return null==r?-1:(n.p=r,e)}var s=n.dateTime,h=n.date,p=n.time,g=n.periods,v=n.days,d=n.shortDays,y=n.months,m=n.shortMonths;t.utc=function(n){function e(n){try{va=Hn;var t=new va;return t._=n,r(t)}finally{va=Date}}var r=t(n);return e.parse=function(n){try{va=Hn;var t=r.parse(n);return t&&t._}finally{va=Date}},e.toString=r.toString,e},t.multi=t.utc.multi=ct;var M=ao.map(),x=Vn(v),b=Xn(v),_=Vn(d),w=Xn(d),S=Vn(y),k=Xn(y),N=Vn(m),E=Xn(m);g.forEach(function(n,t){M.set(n.toLowerCase(),t)});var A={a:function(n){return d[n.getDay()]},A:function(n){return v[n.getDay()]},b:function(n){return m[n.getMonth()]},B:function(n){return y[n.getMonth()]},c:t(s),d:function(n,t){return Zn(n.getDate(),t,2)},e:function(n,t){return Zn(n.getDate(),t,2)},H:function(n,t){return Zn(n.getHours(),t,2)},I:function(n,t){return Zn(n.getHours()%12||12,t,2)},j:function(n,t){return Zn(1+ga.dayOfYear(n),t,3)},L:function(n,t){return Zn(n.getMilliseconds(),t,3)},m:function(n,t){return Zn(n.getMonth()+1,t,2)},M:function(n,t){return Zn(n.getMinutes(),t,2)},p:function(n){return g[+(n.getHours()>=12)]},S:function(n,t){return Zn(n.getSeconds(),t,2)},U:function(n,t){return Zn(ga.sundayOfYear(n),t,2)},w:function(n){return n.getDay()},W:function(n,t){return Zn(ga.mondayOfYear(n),t,2)},x:t(h),X:t(p),y:function(n,t){return Zn(n.getFullYear()%100,t,2)},Y:function(n,t){return Zn(n.getFullYear()%1e4,t,4)},Z:at,"%":function(){return"%"}},C={a:r,A:i,b:u,B:o,c:a,d:tt,e:tt,H:rt,I:rt,j:et,L:ot,m:nt,M:it,p:f,S:ut,U:Bn,w:$n,W:Wn,x:l,X:c,y:Gn,Y:Jn,Z:Kn,"%":lt};return t}function Zn(n,t,e){var 
r=0>n?"-":"",i=(r?-n:n)+"",u=i.length;return r+(e>u?new Array(e-u+1).join(t)+i:i)}function Vn(n){return new RegExp("^(?:"+n.map(ao.requote).join("|")+")","i")}function Xn(n){for(var t=new c,e=-1,r=n.length;++e<r;)t.set(n[e].toLowerCase(),e);return t}function $n(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+1));return r?(n.w=+r[0],e+r[0].length):-1}function Bn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e));return r?(n.U=+r[0],e+r[0].length):-1}function Wn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e));return r?(n.W=+r[0],e+r[0].length):-1}function Jn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+4));return r?(n.y=+r[0],e+r[0].length):-1}function Gn(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.y=Qn(+r[0]),e+r[0].length):-1}function Kn(n,t,e){return/^[+-]\d{4}$/.test(t=t.slice(e,e+5))?(n.Z=-t,e+5):-1}function Qn(n){return n+(n>68?1900:2e3)}function nt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.m=r[0]-1,e+r[0].length):-1}function tt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.d=+r[0],e+r[0].length):-1}function et(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.j=+r[0],e+r[0].length):-1}function rt(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.H=+r[0],e+r[0].length):-1}function it(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.M=+r[0],e+r[0].length):-1}function ut(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+2));return r?(n.S=+r[0],e+r[0].length):-1}function ot(n,t,e){ma.lastIndex=0;var r=ma.exec(t.slice(e,e+3));return r?(n.L=+r[0],e+r[0].length):-1}function at(n){var t=n.getTimezoneOffset(),e=t>0?"-":"+",r=xo(t)/60|0,i=xo(t)%60;return e+Zn(r,"0",2)+Zn(i,"0",2)}function lt(n,t,e){Ma.lastIndex=0;var r=Ma.exec(t.slice(e,e+1));return r?e+r[0].length:-1}function ct(n){for(var t=n.length,e=-1;++e<t;)n[e][0]=this(n[e][0]);return function(t){for(var e=0,r=n[e];!r[1](t);)r=n[++e];return r[0](t)}}function ft(){}function st(n,t,e){var r=e.s=n+t,i=r-n,u=r-i;e.t=n-u+(t-i)}function ht(n,t){n&&wa.hasOwnProperty(n.type)&&wa[n.type](n,t)}function pt(n,t,e){var r,i=-1,u=n.length-e;for(t.lineStart();++i<u;)r=n[i],t.point(r[0],r[1],r[2]);t.lineEnd()}function gt(n,t){var e=-1,r=n.length;for(t.polygonStart();++e<r;)pt(n[e],t,1);t.polygonEnd()}function vt(){function n(n,t){n*=Yo,t=t*Yo/2+Fo/4;var e=n-r,o=e>=0?1:-1,a=o*e,l=Math.cos(t),c=Math.sin(t),f=u*c,s=i*l+f*Math.cos(a),h=f*o*Math.sin(a);ka.add(Math.atan2(h,s)),r=n,i=l,u=c}var t,e,r,i,u;Na.point=function(o,a){Na.point=n,r=(t=o)*Yo,i=Math.cos(a=(e=a)*Yo/2+Fo/4),u=Math.sin(a)},Na.lineEnd=function(){n(t,e)}}function dt(n){var t=n[0],e=n[1],r=Math.cos(e);return[r*Math.cos(t),r*Math.sin(t),Math.sin(e)]}function yt(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]}function mt(n,t){return[n[1]*t[2]-n[2]*t[1],n[2]*t[0]-n[0]*t[2],n[0]*t[1]-n[1]*t[0]]}function Mt(n,t){n[0]+=t[0],n[1]+=t[1],n[2]+=t[2]}function xt(n,t){return[n[0]*t,n[1]*t,n[2]*t]}function bt(n){var t=Math.sqrt(n[0]*n[0]+n[1]*n[1]+n[2]*n[2]);n[0]/=t,n[1]/=t,n[2]/=t}function _t(n){return[Math.atan2(n[1],n[0]),tn(n[2])]}function wt(n,t){return xo(n[0]-t[0])<Uo&&xo(n[1]-t[1])<Uo}function St(n,t){n*=Yo;var e=Math.cos(t*=Yo);kt(e*Math.cos(n),e*Math.sin(n),Math.sin(t))}function kt(n,t,e){++Ea,Ca+=(n-Ca)/Ea,za+=(t-za)/Ea,La+=(e-La)/Ea}function Nt(){function n(n,i){n*=Yo;var u=Math.cos(i*=Yo),o=u*Math.cos(n),a=u*Math.sin(n),l=Math.sin(i),c=Math.atan2(Math.sqrt((c=e*l-r*a)*c+(c=r*o-t*l)*c+(c=t*a-e*o)*c),t*o+e*a+r*l);Aa+=c,qa+=c*(t+(t=o)),Ta+=c*(e+(e=a)),Ra+=c*(r+(r=l)),kt(t,e,r)}var 
t,e,r;ja.point=function(i,u){i*=Yo;var o=Math.cos(u*=Yo);t=o*Math.cos(i),e=o*Math.sin(i),r=Math.sin(u),ja.point=n,kt(t,e,r)}}function Et(){ja.point=St}function At(){function n(n,t){n*=Yo;var e=Math.cos(t*=Yo),o=e*Math.cos(n),a=e*Math.sin(n),l=Math.sin(t),c=i*l-u*a,f=u*o-r*l,s=r*a-i*o,h=Math.sqrt(c*c+f*f+s*s),p=r*o+i*a+u*l,g=h&&-nn(p)/h,v=Math.atan2(h,p);Da+=g*c,Pa+=g*f,Ua+=g*s,Aa+=v,qa+=v*(r+(r=o)),Ta+=v*(i+(i=a)),Ra+=v*(u+(u=l)),kt(r,i,u)}var t,e,r,i,u;ja.point=function(o,a){t=o,e=a,ja.point=n,o*=Yo;var l=Math.cos(a*=Yo);r=l*Math.cos(o),i=l*Math.sin(o),u=Math.sin(a),kt(r,i,u)},ja.lineEnd=function(){n(t,e),ja.lineEnd=Et,ja.point=St}}function Ct(n,t){function e(e,r){return e=n(e,r),t(e[0],e[1])}return n.invert&&t.invert&&(e.invert=function(e,r){return e=t.invert(e,r),e&&n.invert(e[0],e[1])}),e}function zt(){return!0}function Lt(n,t,e,r,i){var u=[],o=[];if(n.forEach(function(n){if(!((t=n.length-1)<=0)){var t,e=n[0],r=n[t];if(wt(e,r)){i.lineStart();for(var a=0;t>a;++a)i.point((e=n[a])[0],e[1]);return void i.lineEnd()}var l=new Tt(e,n,null,!0),c=new Tt(e,null,l,!1);l.o=c,u.push(l),o.push(c),l=new Tt(r,n,null,!1),c=new Tt(r,null,l,!0),l.o=c,u.push(l),o.push(c)}}),o.sort(t),qt(u),qt(o),u.length){for(var a=0,l=e,c=o.length;c>a;++a)o[a].e=l=!l;for(var f,s,h=u[0];;){for(var p=h,g=!0;p.v;)if((p=p.n)===h)return;f=p.z,i.lineStart();do{if(p.v=p.o.v=!0,p.e){if(g)for(var a=0,c=f.length;c>a;++a)i.point((s=f[a])[0],s[1]);else r(p.x,p.n.x,1,i);p=p.n}else{if(g){f=p.p.z;for(var a=f.length-1;a>=0;--a)i.point((s=f[a])[0],s[1])}else r(p.x,p.p.x,-1,i);p=p.p}p=p.o,f=p.z,g=!g}while(!p.v);i.lineEnd()}}}function qt(n){if(t=n.length){for(var t,e,r=0,i=n[0];++r<t;)i.n=e=n[r],e.p=i,i=e;i.n=e=n[0],e.p=i}}function Tt(n,t,e,r){this.x=n,this.z=t,this.o=e,this.e=r,this.v=!1,this.n=this.p=null}function Rt(n,t,e,r){return function(i,u){function o(t,e){var r=i(t,e);n(t=r[0],e=r[1])&&u.point(t,e)}function a(n,t){var e=i(n,t);d.point(e[0],e[1])}function l(){m.point=a,d.lineStart()}function c(){m.point=o,d.lineEnd()}function f(n,t){v.push([n,t]);var e=i(n,t);x.point(e[0],e[1])}function s(){x.lineStart(),v=[]}function h(){f(v[0][0],v[0][1]),x.lineEnd();var n,t=x.clean(),e=M.buffer(),r=e.length;if(v.pop(),g.push(v),v=null,r)if(1&t){n=e[0];var i,r=n.length-1,o=-1;if(r>0){for(b||(u.polygonStart(),b=!0),u.lineStart();++o<r;)u.point((i=n[o])[0],i[1]);u.lineEnd()}}else r>1&&2&t&&e.push(e.pop().concat(e.shift())),p.push(e.filter(Dt))}var p,g,v,d=t(u),y=i.invert(r[0],r[1]),m={point:o,lineStart:l,lineEnd:c,polygonStart:function(){m.point=f,m.lineStart=s,m.lineEnd=h,p=[],g=[]},polygonEnd:function(){m.point=o,m.lineStart=l,m.lineEnd=c,p=ao.merge(p);var n=Ot(y,g);p.length?(b||(u.polygonStart(),b=!0),Lt(p,Ut,n,e,u)):n&&(b||(u.polygonStart(),b=!0),u.lineStart(),e(null,null,1,u),u.lineEnd()),b&&(u.polygonEnd(),b=!1),p=g=null},sphere:function(){u.polygonStart(),u.lineStart(),e(null,null,1,u),u.lineEnd(),u.polygonEnd()}},M=Pt(),x=t(M),b=!1;return m}}function Dt(n){return n.length>1}function Pt(){var n,t=[];return{lineStart:function(){t.push(n=[])},point:function(t,e){n.push([t,e])},lineEnd:b,buffer:function(){var e=t;return t=[],n=null,e},rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))}}}function Ut(n,t){return((n=n.x)[0]<0?n[1]-Io-Uo:Io-n[1])-((t=t.x)[0]<0?t[1]-Io-Uo:Io-t[1])}function jt(n){var t,e=NaN,r=NaN,i=NaN;return{lineStart:function(){n.lineStart(),t=1},point:function(u,o){var 
a=u>0?Fo:-Fo,l=xo(u-e);xo(l-Fo)<Uo?(n.point(e,r=(r+o)/2>0?Io:-Io),n.point(i,r),n.lineEnd(),n.lineStart(),n.point(a,r),n.point(u,r),t=0):i!==a&&l>=Fo&&(xo(e-i)<Uo&&(e-=i*Uo),xo(u-a)<Uo&&(u-=a*Uo),r=Ft(e,r,u,o),n.point(i,r),n.lineEnd(),n.lineStart(),n.point(a,r),t=0),n.point(e=u,r=o),i=a},lineEnd:function(){n.lineEnd(),e=r=NaN},clean:function(){return 2-t}}}function Ft(n,t,e,r){var i,u,o=Math.sin(n-e);return xo(o)>Uo?Math.atan((Math.sin(t)*(u=Math.cos(r))*Math.sin(e)-Math.sin(r)*(i=Math.cos(t))*Math.sin(n))/(i*u*o)):(t+r)/2}function Ht(n,t,e,r){var i;if(null==n)i=e*Io,r.point(-Fo,i),r.point(0,i),r.point(Fo,i),r.point(Fo,0),r.point(Fo,-i),r.point(0,-i),r.point(-Fo,-i),r.point(-Fo,0),r.point(-Fo,i);else if(xo(n[0]-t[0])>Uo){var u=n[0]<t[0]?Fo:-Fo;i=e*u/2,r.point(-u,i),r.point(0,i),r.point(u,i)}else r.point(t[0],t[1])}function Ot(n,t){var e=n[0],r=n[1],i=[Math.sin(e),-Math.cos(e),0],u=0,o=0;ka.reset();for(var a=0,l=t.length;l>a;++a){var c=t[a],f=c.length;if(f)for(var s=c[0],h=s[0],p=s[1]/2+Fo/4,g=Math.sin(p),v=Math.cos(p),d=1;;){d===f&&(d=0),n=c[d];var y=n[0],m=n[1]/2+Fo/4,M=Math.sin(m),x=Math.cos(m),b=y-h,_=b>=0?1:-1,w=_*b,S=w>Fo,k=g*M;if(ka.add(Math.atan2(k*_*Math.sin(w),v*x+k*Math.cos(w))),u+=S?b+_*Ho:b,S^h>=e^y>=e){var N=mt(dt(s),dt(n));bt(N);var E=mt(i,N);bt(E);var A=(S^b>=0?-1:1)*tn(E[2]);(r>A||r===A&&(N[0]||N[1]))&&(o+=S^b>=0?1:-1)}if(!d++)break;h=y,g=M,v=x,s=n}}return(-Uo>u||Uo>u&&-Uo>ka)^1&o}function It(n){function t(n,t){return Math.cos(n)*Math.cos(t)>u}function e(n){var e,u,l,c,f;return{lineStart:function(){c=l=!1,f=1},point:function(s,h){var p,g=[s,h],v=t(s,h),d=o?v?0:i(s,h):v?i(s+(0>s?Fo:-Fo),h):0;if(!e&&(c=l=v)&&n.lineStart(),v!==l&&(p=r(e,g),(wt(e,p)||wt(g,p))&&(g[0]+=Uo,g[1]+=Uo,v=t(g[0],g[1]))),v!==l)f=0,v?(n.lineStart(),p=r(g,e),n.point(p[0],p[1])):(p=r(e,g),n.point(p[0],p[1]),n.lineEnd()),e=p;else if(a&&e&&o^v){var y;d&u||!(y=r(g,e,!0))||(f=0,o?(n.lineStart(),n.point(y[0][0],y[0][1]),n.point(y[1][0],y[1][1]),n.lineEnd()):(n.point(y[1][0],y[1][1]),n.lineEnd(),n.lineStart(),n.point(y[0][0],y[0][1])))}!v||e&&wt(e,g)||n.point(g[0],g[1]),e=g,l=v,u=d},lineEnd:function(){l&&n.lineEnd(),e=null},clean:function(){return f|(c&&l)<<1}}}function r(n,t,e){var r=dt(n),i=dt(t),o=[1,0,0],a=mt(r,i),l=yt(a,a),c=a[0],f=l-c*c;if(!f)return!e&&n;var s=u*l/f,h=-u*c/f,p=mt(o,a),g=xt(o,s),v=xt(a,h);Mt(g,v);var d=p,y=yt(g,d),m=yt(d,d),M=y*y-m*(yt(g,g)-1);if(!(0>M)){var x=Math.sqrt(M),b=xt(d,(-y-x)/m);if(Mt(b,g),b=_t(b),!e)return b;var _,w=n[0],S=t[0],k=n[1],N=t[1];w>S&&(_=w,w=S,S=_);var E=S-w,A=xo(E-Fo)<Uo,C=A||Uo>E;if(!A&&k>N&&(_=k,k=N,N=_),C?A?k+N>0^b[1]<(xo(b[0]-w)<Uo?k:N):k<=b[1]&&b[1]<=N:E>Fo^(w<=b[0]&&b[0]<=S)){var z=xt(d,(-y+x)/m);return Mt(z,g),[b,_t(z)]}}}function i(t,e){var r=o?n:Fo-n,i=0;return-r>t?i|=1:t>r&&(i|=2),-r>e?i|=4:e>r&&(i|=8),i}var u=Math.cos(n),o=u>0,a=xo(u)>Uo,l=ve(n,6*Yo);return Rt(t,e,l,o?[0,-n]:[-Fo,n-Fo])}function Yt(n,t,e,r){return function(i){var u,o=i.a,a=i.b,l=o.x,c=o.y,f=a.x,s=a.y,h=0,p=1,g=f-l,v=s-c;if(u=n-l,g||!(u>0)){if(u/=g,0>g){if(h>u)return;p>u&&(p=u)}else if(g>0){if(u>p)return;u>h&&(h=u)}if(u=e-l,g||!(0>u)){if(u/=g,0>g){if(u>p)return;u>h&&(h=u)}else if(g>0){if(h>u)return;p>u&&(p=u)}if(u=t-c,v||!(u>0)){if(u/=v,0>v){if(h>u)return;p>u&&(p=u)}else if(v>0){if(u>p)return;u>h&&(h=u)}if(u=r-c,v||!(0>u)){if(u/=v,0>v){if(u>p)return;u>h&&(h=u)}else if(v>0){if(h>u)return;p>u&&(p=u)}return h>0&&(i.a={x:l+h*g,y:c+h*v}),1>p&&(i.b={x:l+p*g,y:c+p*v}),i}}}}}}function Zt(n,t,e,r){function i(r,i){return 
xo(r[0]-n)<Uo?i>0?0:3:xo(r[0]-e)<Uo?i>0?2:1:xo(r[1]-t)<Uo?i>0?1:0:i>0?3:2}function u(n,t){return o(n.x,t.x)}function o(n,t){var e=i(n,1),r=i(t,1);return e!==r?e-r:0===e?t[1]-n[1]:1===e?n[0]-t[0]:2===e?n[1]-t[1]:t[0]-n[0]}return function(a){function l(n){for(var t=0,e=d.length,r=n[1],i=0;e>i;++i)for(var u,o=1,a=d[i],l=a.length,c=a[0];l>o;++o)u=a[o],c[1]<=r?u[1]>r&&Q(c,u,n)>0&&++t:u[1]<=r&&Q(c,u,n)<0&&--t,c=u;return 0!==t}function c(u,a,l,c){var f=0,s=0;if(null==u||(f=i(u,l))!==(s=i(a,l))||o(u,a)<0^l>0){do c.point(0===f||3===f?n:e,f>1?r:t);while((f=(f+l+4)%4)!==s)}else c.point(a[0],a[1])}function f(i,u){return i>=n&&e>=i&&u>=t&&r>=u}function s(n,t){f(n,t)&&a.point(n,t)}function h(){C.point=g,d&&d.push(y=[]),S=!0,w=!1,b=_=NaN}function p(){v&&(g(m,M),x&&w&&E.rejoin(),v.push(E.buffer())),C.point=s,w&&a.lineEnd()}function g(n,t){n=Math.max(-Ha,Math.min(Ha,n)),t=Math.max(-Ha,Math.min(Ha,t));var e=f(n,t);if(d&&y.push([n,t]),S)m=n,M=t,x=e,S=!1,e&&(a.lineStart(),a.point(n,t));else if(e&&w)a.point(n,t);else{var r={a:{x:b,y:_},b:{x:n,y:t}};A(r)?(w||(a.lineStart(),a.point(r.a.x,r.a.y)),a.point(r.b.x,r.b.y),e||a.lineEnd(),k=!1):e&&(a.lineStart(),a.point(n,t),k=!1)}b=n,_=t,w=e}var v,d,y,m,M,x,b,_,w,S,k,N=a,E=Pt(),A=Yt(n,t,e,r),C={point:s,lineStart:h,lineEnd:p,polygonStart:function(){a=E,v=[],d=[],k=!0},polygonEnd:function(){a=N,v=ao.merge(v);var t=l([n,r]),e=k&&t,i=v.length;(e||i)&&(a.polygonStart(),e&&(a.lineStart(),c(null,null,1,a),a.lineEnd()),i&&Lt(v,u,t,c,a),a.polygonEnd()),v=d=y=null}};return C}}function Vt(n){var t=0,e=Fo/3,r=ae(n),i=r(t,e);return i.parallels=function(n){return arguments.length?r(t=n[0]*Fo/180,e=n[1]*Fo/180):[t/Fo*180,e/Fo*180]},i}function Xt(n,t){function e(n,t){var e=Math.sqrt(u-2*i*Math.sin(t))/i;return[e*Math.sin(n*=i),o-e*Math.cos(n)]}var r=Math.sin(n),i=(r+Math.sin(t))/2,u=1+r*(2*i-r),o=Math.sqrt(u)/i;return e.invert=function(n,t){var e=o-t;return[Math.atan2(n,e)/i,tn((u-(n*n+e*e)*i*i)/(2*i))]},e}function $t(){function n(n,t){Ia+=i*n-r*t,r=n,i=t}var t,e,r,i;$a.point=function(u,o){$a.point=n,t=r=u,e=i=o},$a.lineEnd=function(){n(t,e)}}function Bt(n,t){Ya>n&&(Ya=n),n>Va&&(Va=n),Za>t&&(Za=t),t>Xa&&(Xa=t)}function Wt(){function n(n,t){o.push("M",n,",",t,u)}function t(n,t){o.push("M",n,",",t),a.point=e}function e(n,t){o.push("L",n,",",t)}function r(){a.point=n}function i(){o.push("Z")}var u=Jt(4.5),o=[],a={point:n,lineStart:function(){a.point=t},lineEnd:r,polygonStart:function(){a.lineEnd=i},polygonEnd:function(){a.lineEnd=r,a.point=n},pointRadius:function(n){return u=Jt(n),a},result:function(){if(o.length){var n=o.join("");return o=[],n}}};return a}function Jt(n){return"m0,"+n+"a"+n+","+n+" 0 1,1 0,"+-2*n+"a"+n+","+n+" 0 1,1 0,"+2*n+"z"}function Gt(n,t){Ca+=n,za+=t,++La}function Kt(){function n(n,r){var i=n-t,u=r-e,o=Math.sqrt(i*i+u*u);qa+=o*(t+n)/2,Ta+=o*(e+r)/2,Ra+=o,Gt(t=n,e=r)}var t,e;Wa.point=function(r,i){Wa.point=n,Gt(t=r,e=i)}}function Qt(){Wa.point=Gt}function ne(){function n(n,t){var e=n-r,u=t-i,o=Math.sqrt(e*e+u*u);qa+=o*(r+n)/2,Ta+=o*(i+t)/2,Ra+=o,o=i*n-r*t,Da+=o*(r+n),Pa+=o*(i+t),Ua+=3*o,Gt(r=n,i=t)}var t,e,r,i;Wa.point=function(u,o){Wa.point=n,Gt(t=r=u,e=i=o)},Wa.lineEnd=function(){n(t,e)}}function te(n){function t(t,e){n.moveTo(t+o,e),n.arc(t,e,o,0,Ho)}function e(t,e){n.moveTo(t,e),a.point=r}function r(t,e){n.lineTo(t,e)}function i(){a.point=t}function u(){n.closePath()}var o=4.5,a={point:t,lineStart:function(){a.point=e},lineEnd:i,polygonStart:function(){a.lineEnd=u},polygonEnd:function(){a.lineEnd=i,a.point=t},pointRadius:function(n){return 
o=n,a},result:b};return a}function ee(n){function t(n){return(a?r:e)(n)}function e(t){return ue(t,function(e,r){e=n(e,r),t.point(e[0],e[1])})}function r(t){function e(e,r){e=n(e,r),t.point(e[0],e[1])}function r(){M=NaN,S.point=u,t.lineStart()}function u(e,r){var u=dt([e,r]),o=n(e,r);i(M,x,m,b,_,w,M=o[0],x=o[1],m=e,b=u[0],_=u[1],w=u[2],a,t),t.point(M,x)}function o(){S.point=e,t.lineEnd()}function l(){
+r(),S.point=c,S.lineEnd=f}function c(n,t){u(s=n,h=t),p=M,g=x,v=b,d=_,y=w,S.point=u}function f(){i(M,x,m,b,_,w,p,g,s,v,d,y,a,t),S.lineEnd=o,o()}var s,h,p,g,v,d,y,m,M,x,b,_,w,S={point:e,lineStart:r,lineEnd:o,polygonStart:function(){t.polygonStart(),S.lineStart=l},polygonEnd:function(){t.polygonEnd(),S.lineStart=r}};return S}function i(t,e,r,a,l,c,f,s,h,p,g,v,d,y){var m=f-t,M=s-e,x=m*m+M*M;if(x>4*u&&d--){var b=a+p,_=l+g,w=c+v,S=Math.sqrt(b*b+_*_+w*w),k=Math.asin(w/=S),N=xo(xo(w)-1)<Uo||xo(r-h)<Uo?(r+h)/2:Math.atan2(_,b),E=n(N,k),A=E[0],C=E[1],z=A-t,L=C-e,q=M*z-m*L;(q*q/x>u||xo((m*z+M*L)/x-.5)>.3||o>a*p+l*g+c*v)&&(i(t,e,r,a,l,c,A,C,N,b/=S,_/=S,w,d,y),y.point(A,C),i(A,C,N,b,_,w,f,s,h,p,g,v,d,y))}}var u=.5,o=Math.cos(30*Yo),a=16;return t.precision=function(n){return arguments.length?(a=(u=n*n)>0&&16,t):Math.sqrt(u)},t}function re(n){var t=ee(function(t,e){return n([t*Zo,e*Zo])});return function(n){return le(t(n))}}function ie(n){this.stream=n}function ue(n,t){return{point:t,sphere:function(){n.sphere()},lineStart:function(){n.lineStart()},lineEnd:function(){n.lineEnd()},polygonStart:function(){n.polygonStart()},polygonEnd:function(){n.polygonEnd()}}}function oe(n){return ae(function(){return n})()}function ae(n){function t(n){return n=a(n[0]*Yo,n[1]*Yo),[n[0]*h+l,c-n[1]*h]}function e(n){return n=a.invert((n[0]-l)/h,(c-n[1])/h),n&&[n[0]*Zo,n[1]*Zo]}function r(){a=Ct(o=se(y,M,x),u);var n=u(v,d);return l=p-n[0]*h,c=g+n[1]*h,i()}function i(){return f&&(f.valid=!1,f=null),t}var u,o,a,l,c,f,s=ee(function(n,t){return n=u(n,t),[n[0]*h+l,c-n[1]*h]}),h=150,p=480,g=250,v=0,d=0,y=0,M=0,x=0,b=Fa,_=m,w=null,S=null;return t.stream=function(n){return f&&(f.valid=!1),f=le(b(o,s(_(n)))),f.valid=!0,f},t.clipAngle=function(n){return arguments.length?(b=null==n?(w=n,Fa):It((w=+n)*Yo),i()):w},t.clipExtent=function(n){return arguments.length?(S=n,_=n?Zt(n[0][0],n[0][1],n[1][0],n[1][1]):m,i()):S},t.scale=function(n){return arguments.length?(h=+n,r()):h},t.translate=function(n){return arguments.length?(p=+n[0],g=+n[1],r()):[p,g]},t.center=function(n){return arguments.length?(v=n[0]%360*Yo,d=n[1]%360*Yo,r()):[v*Zo,d*Zo]},t.rotate=function(n){return arguments.length?(y=n[0]%360*Yo,M=n[1]%360*Yo,x=n.length>2?n[2]%360*Yo:0,r()):[y*Zo,M*Zo,x*Zo]},ao.rebind(t,s,"precision"),function(){return u=n.apply(this,arguments),t.invert=u.invert&&e,r()}}function le(n){return ue(n,function(t,e){n.point(t*Yo,e*Yo)})}function ce(n,t){return[n,t]}function fe(n,t){return[n>Fo?n-Ho:-Fo>n?n+Ho:n,t]}function se(n,t,e){return n?t||e?Ct(pe(n),ge(t,e)):pe(n):t||e?ge(t,e):fe}function he(n){return function(t,e){return t+=n,[t>Fo?t-Ho:-Fo>t?t+Ho:t,e]}}function pe(n){var t=he(n);return t.invert=he(-n),t}function ge(n,t){function e(n,t){var e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*r+a*i;return[Math.atan2(l*u-f*o,a*r-c*i),tn(f*u+l*o)]}var r=Math.cos(n),i=Math.sin(n),u=Math.cos(t),o=Math.sin(t);return e.invert=function(n,t){var e=Math.cos(t),a=Math.cos(n)*e,l=Math.sin(n)*e,c=Math.sin(t),f=c*u-l*o;return[Math.atan2(l*u+c*o,a*r+f*i),tn(f*r-a*i)]},e}function ve(n,t){var e=Math.cos(n),r=Math.sin(n);return function(i,u,o,a){var l=o*t;null!=i?(i=de(e,i),u=de(e,u),(o>0?u>i:i>u)&&(i+=o*Ho)):(i=n+o*Ho,u=n-.5*l);for(var c,f=i;o>0?f>u:u>f;f-=l)a.point((c=_t([e,-r*Math.cos(f),-r*Math.sin(f)]))[0],c[1])}}function de(n,t){var e=dt(t);e[0]-=n,bt(e);var r=nn(-e[1]);return((-e[2]<0?-r:r)+2*Math.PI-Uo)%(2*Math.PI)}function ye(n,t,e){var r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[n,t]})}}function me(n,t,e){var 
r=ao.range(n,t-Uo,e).concat(t);return function(n){return r.map(function(t){return[t,n]})}}function Me(n){return n.source}function xe(n){return n.target}function be(n,t,e,r){var i=Math.cos(t),u=Math.sin(t),o=Math.cos(r),a=Math.sin(r),l=i*Math.cos(n),c=i*Math.sin(n),f=o*Math.cos(e),s=o*Math.sin(e),h=2*Math.asin(Math.sqrt(on(r-t)+i*o*on(e-n))),p=1/Math.sin(h),g=h?function(n){var t=Math.sin(n*=h)*p,e=Math.sin(h-n)*p,r=e*l+t*f,i=e*c+t*s,o=e*u+t*a;return[Math.atan2(i,r)*Zo,Math.atan2(o,Math.sqrt(r*r+i*i))*Zo]}:function(){return[n*Zo,t*Zo]};return g.distance=h,g}function _e(){function n(n,i){var u=Math.sin(i*=Yo),o=Math.cos(i),a=xo((n*=Yo)-t),l=Math.cos(a);Ja+=Math.atan2(Math.sqrt((a=o*Math.sin(a))*a+(a=r*u-e*o*l)*a),e*u+r*o*l),t=n,e=u,r=o}var t,e,r;Ga.point=function(i,u){t=i*Yo,e=Math.sin(u*=Yo),r=Math.cos(u),Ga.point=n},Ga.lineEnd=function(){Ga.point=Ga.lineEnd=b}}function we(n,t){function e(t,e){var r=Math.cos(t),i=Math.cos(e),u=n(r*i);return[u*i*Math.sin(t),u*Math.sin(e)]}return e.invert=function(n,e){var r=Math.sqrt(n*n+e*e),i=t(r),u=Math.sin(i),o=Math.cos(i);return[Math.atan2(n*u,r*o),Math.asin(r&&e*u/r)]},e}function Se(n,t){function e(n,t){o>0?-Io+Uo>t&&(t=-Io+Uo):t>Io-Uo&&(t=Io-Uo);var e=o/Math.pow(i(t),u);return[e*Math.sin(u*n),o-e*Math.cos(u*n)]}var r=Math.cos(n),i=function(n){return Math.tan(Fo/4+n/2)},u=n===t?Math.sin(n):Math.log(r/Math.cos(t))/Math.log(i(t)/i(n)),o=r*Math.pow(i(n),u)/u;return u?(e.invert=function(n,t){var e=o-t,r=K(u)*Math.sqrt(n*n+e*e);return[Math.atan2(n,e)/u,2*Math.atan(Math.pow(o/r,1/u))-Io]},e):Ne}function ke(n,t){function e(n,t){var e=u-t;return[e*Math.sin(i*n),u-e*Math.cos(i*n)]}var r=Math.cos(n),i=n===t?Math.sin(n):(r-Math.cos(t))/(t-n),u=r/i+n;return xo(i)<Uo?ce:(e.invert=function(n,t){var e=u-t;return[Math.atan2(n,e)/i,u-K(i)*Math.sqrt(n*n+e*e)]},e)}function Ne(n,t){return[n,Math.log(Math.tan(Fo/4+t/2))]}function Ee(n){var t,e=oe(n),r=e.scale,i=e.translate,u=e.clipExtent;return e.scale=function(){var n=r.apply(e,arguments);return n===e?t?e.clipExtent(null):e:n},e.translate=function(){var n=i.apply(e,arguments);return n===e?t?e.clipExtent(null):e:n},e.clipExtent=function(n){var o=u.apply(e,arguments);if(o===e){if(t=null==n){var a=Fo*r(),l=i();u([[l[0]-a,l[1]-a],[l[0]+a,l[1]+a]])}}else t&&(o=null);return o},e.clipExtent(null)}function Ae(n,t){return[Math.log(Math.tan(Fo/4+t/2)),-n]}function Ce(n){return n[0]}function ze(n){return n[1]}function Le(n){for(var t=n.length,e=[0,1],r=2,i=2;t>i;i++){for(;r>1&&Q(n[e[r-2]],n[e[r-1]],n[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function qe(n,t){return n[0]-t[0]||n[1]-t[1]}function Te(n,t,e){return(e[0]-t[0])*(n[1]-t[1])<(e[1]-t[1])*(n[0]-t[0])}function Re(n,t,e,r){var i=n[0],u=e[0],o=t[0]-i,a=r[0]-u,l=n[1],c=e[1],f=t[1]-l,s=r[1]-c,h=(a*(l-c)-s*(i-u))/(s*o-a*f);return[i+h*o,l+h*f]}function De(n){var t=n[0],e=n[n.length-1];return!(t[0]-e[0]||t[1]-e[1])}function Pe(){rr(this),this.edge=this.site=this.circle=null}function Ue(n){var t=cl.pop()||new Pe;return t.site=n,t}function je(n){Be(n),ol.remove(n),cl.push(n),rr(n)}function Fe(n){var t=n.circle,e=t.x,r=t.cy,i={x:e,y:r},u=n.P,o=n.N,a=[n];je(n);for(var l=u;l.circle&&xo(e-l.circle.x)<Uo&&xo(r-l.circle.cy)<Uo;)u=l.P,a.unshift(l),je(l),l=u;a.unshift(l),Be(l);for(var c=o;c.circle&&xo(e-c.circle.x)<Uo&&xo(r-c.circle.cy)<Uo;)o=c.N,a.push(c),je(c),c=o;a.push(c),Be(c);var f,s=a.length;for(f=1;s>f;++f)c=a[f],l=a[f-1],nr(c.edge,l.site,c.site,i);l=a[0],c=a[s-1],c.edge=Ke(l.site,c.site,null,i),$e(l),$e(c)}function He(n){for(var 
t,e,r,i,u=n.x,o=n.y,a=ol._;a;)if(r=Oe(a,o)-u,r>Uo)a=a.L;else{if(i=u-Ie(a,o),!(i>Uo)){r>-Uo?(t=a.P,e=a):i>-Uo?(t=a,e=a.N):t=e=a;break}if(!a.R){t=a;break}a=a.R}var l=Ue(n);if(ol.insert(t,l),t||e){if(t===e)return Be(t),e=Ue(t.site),ol.insert(l,e),l.edge=e.edge=Ke(t.site,l.site),$e(t),void $e(e);if(!e)return void(l.edge=Ke(t.site,l.site));Be(t),Be(e);var c=t.site,f=c.x,s=c.y,h=n.x-f,p=n.y-s,g=e.site,v=g.x-f,d=g.y-s,y=2*(h*d-p*v),m=h*h+p*p,M=v*v+d*d,x={x:(d*m-p*M)/y+f,y:(h*M-v*m)/y+s};nr(e.edge,c,g,x),l.edge=Ke(c,n,null,x),e.edge=Ke(n,g,null,x),$e(t),$e(e)}}function Oe(n,t){var e=n.site,r=e.x,i=e.y,u=i-t;if(!u)return r;var o=n.P;if(!o)return-(1/0);e=o.site;var a=e.x,l=e.y,c=l-t;if(!c)return a;var f=a-r,s=1/u-1/c,h=f/c;return s?(-h+Math.sqrt(h*h-2*s*(f*f/(-2*c)-l+c/2+i-u/2)))/s+r:(r+a)/2}function Ie(n,t){var e=n.N;if(e)return Oe(e,t);var r=n.site;return r.y===t?r.x:1/0}function Ye(n){this.site=n,this.edges=[]}function Ze(n){for(var t,e,r,i,u,o,a,l,c,f,s=n[0][0],h=n[1][0],p=n[0][1],g=n[1][1],v=ul,d=v.length;d--;)if(u=v[d],u&&u.prepare())for(a=u.edges,l=a.length,o=0;l>o;)f=a[o].end(),r=f.x,i=f.y,c=a[++o%l].start(),t=c.x,e=c.y,(xo(r-t)>Uo||xo(i-e)>Uo)&&(a.splice(o,0,new tr(Qe(u.site,f,xo(r-s)<Uo&&g-i>Uo?{x:s,y:xo(t-s)<Uo?e:g}:xo(i-g)<Uo&&h-r>Uo?{x:xo(e-g)<Uo?t:h,y:g}:xo(r-h)<Uo&&i-p>Uo?{x:h,y:xo(t-h)<Uo?e:p}:xo(i-p)<Uo&&r-s>Uo?{x:xo(e-p)<Uo?t:s,y:p}:null),u.site,null)),++l)}function Ve(n,t){return t.angle-n.angle}function Xe(){rr(this),this.x=this.y=this.arc=this.site=this.cy=null}function $e(n){var t=n.P,e=n.N;if(t&&e){var r=t.site,i=n.site,u=e.site;if(r!==u){var o=i.x,a=i.y,l=r.x-o,c=r.y-a,f=u.x-o,s=u.y-a,h=2*(l*s-c*f);if(!(h>=-jo)){var p=l*l+c*c,g=f*f+s*s,v=(s*p-c*g)/h,d=(l*g-f*p)/h,s=d+a,y=fl.pop()||new Xe;y.arc=n,y.site=i,y.x=v+o,y.y=s+Math.sqrt(v*v+d*d),y.cy=s,n.circle=y;for(var m=null,M=ll._;M;)if(y.y<M.y||y.y===M.y&&y.x<=M.x){if(!M.L){m=M.P;break}M=M.L}else{if(!M.R){m=M;break}M=M.R}ll.insert(m,y),m||(al=y)}}}}function Be(n){var t=n.circle;t&&(t.P||(al=t.N),ll.remove(t),fl.push(t),rr(t),n.circle=null)}function We(n){for(var t,e=il,r=Yt(n[0][0],n[0][1],n[1][0],n[1][1]),i=e.length;i--;)t=e[i],(!Je(t,n)||!r(t)||xo(t.a.x-t.b.x)<Uo&&xo(t.a.y-t.b.y)<Uo)&&(t.a=t.b=null,e.splice(i,1))}function Je(n,t){var e=n.b;if(e)return!0;var r,i,u=n.a,o=t[0][0],a=t[1][0],l=t[0][1],c=t[1][1],f=n.l,s=n.r,h=f.x,p=f.y,g=s.x,v=s.y,d=(h+g)/2,y=(p+v)/2;if(v===p){if(o>d||d>=a)return;if(h>g){if(u){if(u.y>=c)return}else u={x:d,y:l};e={x:d,y:c}}else{if(u){if(u.y<l)return}else u={x:d,y:c};e={x:d,y:l}}}else if(r=(h-g)/(v-p),i=y-r*d,-1>r||r>1)if(h>g){if(u){if(u.y>=c)return}else u={x:(l-i)/r,y:l};e={x:(c-i)/r,y:c}}else{if(u){if(u.y<l)return}else u={x:(c-i)/r,y:c};e={x:(l-i)/r,y:l}}else if(v>p){if(u){if(u.x>=a)return}else u={x:o,y:r*o+i};e={x:a,y:r*a+i}}else{if(u){if(u.x<o)return}else u={x:a,y:r*a+i};e={x:o,y:r*o+i}}return n.a=u,n.b=e,!0}function Ge(n,t){this.l=n,this.r=t,this.a=this.b=null}function Ke(n,t,e,r){var i=new Ge(n,t);return il.push(i),e&&nr(i,n,t,e),r&&nr(i,t,n,r),ul[n.i].edges.push(new tr(i,n,t)),ul[t.i].edges.push(new tr(i,t,n)),i}function Qe(n,t,e){var r=new Ge(n,null);return r.a=t,r.b=e,il.push(r),r}function nr(n,t,e,r){n.a||n.b?n.l===e?n.b=r:n.a=r:(n.a=r,n.l=t,n.r=e)}function tr(n,t,e){var r=n.a,i=n.b;this.edge=n,this.site=t,this.angle=e?Math.atan2(e.y-t.y,e.x-t.x):n.l===t?Math.atan2(i.x-r.x,r.y-i.y):Math.atan2(r.x-i.x,i.y-r.y)}function er(){this._=null}function rr(n){n.U=n.C=n.L=n.R=n.P=n.N=null}function ir(n,t){var e=t,r=t.R,i=e.U;i?i.L===e?i.L=r:i.R=r:n._=r,r.U=i,e.U=r,e.R=r.L,e.R&&(e.R.U=e),r.L=e}function 
ur(n,t){var e=t,r=t.L,i=e.U;i?i.L===e?i.L=r:i.R=r:n._=r,r.U=i,e.U=r,e.L=r.R,e.L&&(e.L.U=e),r.R=e}function or(n){for(;n.L;)n=n.L;return n}function ar(n,t){var e,r,i,u=n.sort(lr).pop();for(il=[],ul=new Array(n.length),ol=new er,ll=new er;;)if(i=al,u&&(!i||u.y<i.y||u.y===i.y&&u.x<i.x))u.x===e&&u.y===r||(ul[u.i]=new Ye(u),He(u),e=u.x,r=u.y),u=n.pop();else{if(!i)break;Fe(i.arc)}t&&(We(t),Ze(t));var o={cells:ul,edges:il};return ol=ll=il=ul=null,o}function lr(n,t){return t.y-n.y||t.x-n.x}function cr(n,t,e){return(n.x-e.x)*(t.y-n.y)-(n.x-t.x)*(e.y-n.y)}function fr(n){return n.x}function sr(n){return n.y}function hr(){return{leaf:!0,nodes:[],point:null,x:null,y:null}}function pr(n,t,e,r,i,u){if(!n(t,e,r,i,u)){var o=.5*(e+i),a=.5*(r+u),l=t.nodes;l[0]&&pr(n,l[0],e,r,o,a),l[1]&&pr(n,l[1],o,r,i,a),l[2]&&pr(n,l[2],e,a,o,u),l[3]&&pr(n,l[3],o,a,i,u)}}function gr(n,t,e,r,i,u,o){var a,l=1/0;return function c(n,f,s,h,p){if(!(f>u||s>o||r>h||i>p)){if(g=n.point){var g,v=t-n.x,d=e-n.y,y=v*v+d*d;if(l>y){var m=Math.sqrt(l=y);r=t-m,i=e-m,u=t+m,o=e+m,a=g}}for(var M=n.nodes,x=.5*(f+h),b=.5*(s+p),_=t>=x,w=e>=b,S=w<<1|_,k=S+4;k>S;++S)if(n=M[3&S])switch(3&S){case 0:c(n,f,s,x,b);break;case 1:c(n,x,s,h,b);break;case 2:c(n,f,b,x,p);break;case 3:c(n,x,b,h,p)}}}(n,r,i,u,o),a}function vr(n,t){n=ao.rgb(n),t=ao.rgb(t);var e=n.r,r=n.g,i=n.b,u=t.r-e,o=t.g-r,a=t.b-i;return function(n){return"#"+bn(Math.round(e+u*n))+bn(Math.round(r+o*n))+bn(Math.round(i+a*n))}}function dr(n,t){var e,r={},i={};for(e in n)e in t?r[e]=Mr(n[e],t[e]):i[e]=n[e];for(e in t)e in n||(i[e]=t[e]);return function(n){for(e in r)i[e]=r[e](n);return i}}function yr(n,t){return n=+n,t=+t,function(e){return n*(1-e)+t*e}}function mr(n,t){var e,r,i,u=hl.lastIndex=pl.lastIndex=0,o=-1,a=[],l=[];for(n+="",t+="";(e=hl.exec(n))&&(r=pl.exec(t));)(i=r.index)>u&&(i=t.slice(u,i),a[o]?a[o]+=i:a[++o]=i),(e=e[0])===(r=r[0])?a[o]?a[o]+=r:a[++o]=r:(a[++o]=null,l.push({i:o,x:yr(e,r)})),u=pl.lastIndex;return u<t.length&&(i=t.slice(u),a[o]?a[o]+=i:a[++o]=i),a.length<2?l[0]?(t=l[0].x,function(n){return t(n)+""}):function(){return t}:(t=l.length,function(n){for(var e,r=0;t>r;++r)a[(e=l[r]).i]=e.x(n);return a.join("")})}function Mr(n,t){for(var e,r=ao.interpolators.length;--r>=0&&!(e=ao.interpolators[r](n,t)););return e}function xr(n,t){var e,r=[],i=[],u=n.length,o=t.length,a=Math.min(n.length,t.length);for(e=0;a>e;++e)r.push(Mr(n[e],t[e]));for(;u>e;++e)i[e]=n[e];for(;o>e;++e)i[e]=t[e];return function(n){for(e=0;a>e;++e)i[e]=r[e](n);return i}}function br(n){return function(t){return 0>=t?0:t>=1?1:n(t)}}function _r(n){return function(t){return 1-n(1-t)}}function wr(n){return function(t){return.5*(.5>t?n(2*t):2-n(2-2*t))}}function Sr(n){return n*n}function kr(n){return n*n*n}function Nr(n){if(0>=n)return 0;if(n>=1)return 1;var t=n*n,e=t*n;return 4*(.5>n?e:3*(n-t)+e-.75)}function Er(n){return function(t){return Math.pow(t,n)}}function Ar(n){return 1-Math.cos(n*Io)}function Cr(n){return Math.pow(2,10*(n-1))}function zr(n){return 1-Math.sqrt(1-n*n)}function Lr(n,t){var e;return arguments.length<2&&(t=.45),arguments.length?e=t/Ho*Math.asin(1/n):(n=1,e=t/4),function(r){return 1+n*Math.pow(2,-10*r)*Math.sin((r-e)*Ho/t)}}function qr(n){return n||(n=1.70158),function(t){return t*t*((n+1)*t-n)}}function Tr(n){return 1/2.75>n?7.5625*n*n:2/2.75>n?7.5625*(n-=1.5/2.75)*n+.75:2.5/2.75>n?7.5625*(n-=2.25/2.75)*n+.9375:7.5625*(n-=2.625/2.75)*n+.984375}function Rr(n,t){n=ao.hcl(n),t=ao.hcl(t);var e=n.h,r=n.c,i=n.l,u=t.h-e,o=t.c-r,a=t.l-i;return 
isNaN(o)&&(o=0,r=isNaN(r)?t.c:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return sn(e+u*n,r+o*n,i+a*n)+""}}function Dr(n,t){n=ao.hsl(n),t=ao.hsl(t);var e=n.h,r=n.s,i=n.l,u=t.h-e,o=t.s-r,a=t.l-i;return isNaN(o)&&(o=0,r=isNaN(r)?t.s:r),isNaN(u)?(u=0,e=isNaN(e)?t.h:e):u>180?u-=360:-180>u&&(u+=360),function(n){return cn(e+u*n,r+o*n,i+a*n)+""}}function Pr(n,t){n=ao.lab(n),t=ao.lab(t);var e=n.l,r=n.a,i=n.b,u=t.l-e,o=t.a-r,a=t.b-i;return function(n){return pn(e+u*n,r+o*n,i+a*n)+""}}function Ur(n,t){return t-=n,function(e){return Math.round(n+t*e)}}function jr(n){var t=[n.a,n.b],e=[n.c,n.d],r=Hr(t),i=Fr(t,e),u=Hr(Or(e,t,-i))||0;t[0]*e[1]<e[0]*t[1]&&(t[0]*=-1,t[1]*=-1,r*=-1,i*=-1),this.rotate=(r?Math.atan2(t[1],t[0]):Math.atan2(-e[0],e[1]))*Zo,this.translate=[n.e,n.f],this.scale=[r,u],this.skew=u?Math.atan2(i,u)*Zo:0}function Fr(n,t){return n[0]*t[0]+n[1]*t[1]}function Hr(n){var t=Math.sqrt(Fr(n,n));return t&&(n[0]/=t,n[1]/=t),t}function Or(n,t,e){return n[0]+=e*t[0],n[1]+=e*t[1],n}function Ir(n){return n.length?n.pop()+",":""}function Yr(n,t,e,r){if(n[0]!==t[0]||n[1]!==t[1]){var i=e.push("translate(",null,",",null,")");r.push({i:i-4,x:yr(n[0],t[0])},{i:i-2,x:yr(n[1],t[1])})}else(t[0]||t[1])&&e.push("translate("+t+")")}function Zr(n,t,e,r){n!==t?(n-t>180?t+=360:t-n>180&&(n+=360),r.push({i:e.push(Ir(e)+"rotate(",null,")")-2,x:yr(n,t)})):t&&e.push(Ir(e)+"rotate("+t+")")}function Vr(n,t,e,r){n!==t?r.push({i:e.push(Ir(e)+"skewX(",null,")")-2,x:yr(n,t)}):t&&e.push(Ir(e)+"skewX("+t+")")}function Xr(n,t,e,r){if(n[0]!==t[0]||n[1]!==t[1]){var i=e.push(Ir(e)+"scale(",null,",",null,")");r.push({i:i-4,x:yr(n[0],t[0])},{i:i-2,x:yr(n[1],t[1])})}else 1===t[0]&&1===t[1]||e.push(Ir(e)+"scale("+t+")")}function $r(n,t){var e=[],r=[];return n=ao.transform(n),t=ao.transform(t),Yr(n.translate,t.translate,e,r),Zr(n.rotate,t.rotate,e,r),Vr(n.skew,t.skew,e,r),Xr(n.scale,t.scale,e,r),n=t=null,function(n){for(var t,i=-1,u=r.length;++i<u;)e[(t=r[i]).i]=t.x(n);return e.join("")}}function Br(n,t){return t=(t-=n=+n)||1/t,function(e){return(e-n)/t}}function Wr(n,t){return t=(t-=n=+n)||1/t,function(e){return Math.max(0,Math.min(1,(e-n)/t))}}function Jr(n){for(var t=n.source,e=n.target,r=Kr(t,e),i=[t];t!==r;)t=t.parent,i.push(t);for(var u=i.length;e!==r;)i.splice(u,0,e),e=e.parent;return i}function Gr(n){for(var t=[],e=n.parent;null!=e;)t.push(n),n=e,e=e.parent;return t.push(n),t}function Kr(n,t){if(n===t)return n;for(var e=Gr(n),r=Gr(t),i=e.pop(),u=r.pop(),o=null;i===u;)o=i,i=e.pop(),u=r.pop();return o}function Qr(n){n.fixed|=2}function ni(n){n.fixed&=-7}function ti(n){n.fixed|=4,n.px=n.x,n.py=n.y}function ei(n){n.fixed&=-5}function ri(n,t,e){var r=0,i=0;if(n.charge=0,!n.leaf)for(var u,o=n.nodes,a=o.length,l=-1;++l<a;)u=o[l],null!=u&&(ri(u,t,e),n.charge+=u.charge,r+=u.charge*u.cx,i+=u.charge*u.cy);if(n.point){n.leaf||(n.point.x+=Math.random()-.5,n.point.y+=Math.random()-.5);var c=t*e[n.point.index];n.charge+=n.pointCharge=c,r+=c*n.point.x,i+=c*n.point.y}n.cx=r/n.charge,n.cy=i/n.charge}function ii(n,t){return ao.rebind(n,t,"sort","children","value"),n.nodes=n,n.links=fi,n}function ui(n,t){for(var e=[n];null!=(n=e.pop());)if(t(n),(i=n.children)&&(r=i.length))for(var r,i;--r>=0;)e.push(i[r])}function oi(n,t){for(var e=[n],r=[];null!=(n=e.pop());)if(r.push(n),(u=n.children)&&(i=u.length))for(var i,u,o=-1;++o<i;)e.push(u[o]);for(;null!=(n=r.pop());)t(n)}function ai(n){return n.children}function li(n){return n.value}function ci(n,t){return t.value-n.value}function fi(n){return 
ao.merge(n.map(function(n){return(n.children||[]).map(function(t){return{source:n,target:t}})}))}function si(n){return n.x}function hi(n){return n.y}function pi(n,t,e){n.y0=t,n.y=e}function gi(n){return ao.range(n.length)}function vi(n){for(var t=-1,e=n[0].length,r=[];++t<e;)r[t]=0;return r}function di(n){for(var t,e=1,r=0,i=n[0][1],u=n.length;u>e;++e)(t=n[e][1])>i&&(r=e,i=t);return r}function yi(n){return n.reduce(mi,0)}function mi(n,t){return n+t[1]}function Mi(n,t){return xi(n,Math.ceil(Math.log(t.length)/Math.LN2+1))}function xi(n,t){for(var e=-1,r=+n[0],i=(n[1]-r)/t,u=[];++e<=t;)u[e]=i*e+r;return u}function bi(n){return[ao.min(n),ao.max(n)]}function _i(n,t){return n.value-t.value}function wi(n,t){var e=n._pack_next;n._pack_next=t,t._pack_prev=n,t._pack_next=e,e._pack_prev=t}function Si(n,t){n._pack_next=t,t._pack_prev=n}function ki(n,t){var e=t.x-n.x,r=t.y-n.y,i=n.r+t.r;return.999*i*i>e*e+r*r}function Ni(n){function t(n){f=Math.min(n.x-n.r,f),s=Math.max(n.x+n.r,s),h=Math.min(n.y-n.r,h),p=Math.max(n.y+n.r,p)}if((e=n.children)&&(c=e.length)){var e,r,i,u,o,a,l,c,f=1/0,s=-(1/0),h=1/0,p=-(1/0);if(e.forEach(Ei),r=e[0],r.x=-r.r,r.y=0,t(r),c>1&&(i=e[1],i.x=i.r,i.y=0,t(i),c>2))for(u=e[2],zi(r,i,u),t(u),wi(r,u),r._pack_prev=u,wi(u,i),i=r._pack_next,o=3;c>o;o++){zi(r,i,u=e[o]);var g=0,v=1,d=1;for(a=i._pack_next;a!==i;a=a._pack_next,v++)if(ki(a,u)){g=1;break}if(1==g)for(l=r._pack_prev;l!==a._pack_prev&&!ki(l,u);l=l._pack_prev,d++);g?(d>v||v==d&&i.r<r.r?Si(r,i=a):Si(r=l,i),o--):(wi(r,u),i=u,t(u))}var y=(f+s)/2,m=(h+p)/2,M=0;for(o=0;c>o;o++)u=e[o],u.x-=y,u.y-=m,M=Math.max(M,u.r+Math.sqrt(u.x*u.x+u.y*u.y));n.r=M,e.forEach(Ai)}}function Ei(n){n._pack_next=n._pack_prev=n}function Ai(n){delete n._pack_next,delete n._pack_prev}function Ci(n,t,e,r){var i=n.children;if(n.x=t+=r*n.x,n.y=e+=r*n.y,n.r*=r,i)for(var u=-1,o=i.length;++u<o;)Ci(i[u],t,e,r)}function zi(n,t,e){var r=n.r+e.r,i=t.x-n.x,u=t.y-n.y;if(r&&(i||u)){var o=t.r+e.r,a=i*i+u*u;o*=o,r*=r;var l=.5+(r-o)/(2*a),c=Math.sqrt(Math.max(0,2*o*(r+a)-(r-=a)*r-o*o))/(2*a);e.x=n.x+l*i+c*u,e.y=n.y+l*u-c*i}else e.x=n.x+r,e.y=n.y}function Li(n,t){return n.parent==t.parent?1:2}function qi(n){var t=n.children;return t.length?t[0]:n.t}function Ti(n){var t,e=n.children;return(t=e.length)?e[t-1]:n.t}function Ri(n,t,e){var r=e/(t.i-n.i);t.c-=r,t.s+=e,n.c+=r,t.z+=e,t.m+=e}function Di(n){for(var t,e=0,r=0,i=n.children,u=i.length;--u>=0;)t=i[u],t.z+=e,t.m+=e,e+=t.s+(r+=t.c)}function Pi(n,t,e){return n.a.parent===t.parent?n.a:e}function Ui(n){return 1+ao.max(n,function(n){return n.y})}function ji(n){return n.reduce(function(n,t){return n+t.x},0)/n.length}function Fi(n){var t=n.children;return t&&t.length?Fi(t[0]):n}function Hi(n){var t,e=n.children;return e&&(t=e.length)?Hi(e[t-1]):n}function Oi(n){return{x:n.x,y:n.y,dx:n.dx,dy:n.dy}}function Ii(n,t){var e=n.x+t[3],r=n.y+t[0],i=n.dx-t[1]-t[3],u=n.dy-t[0]-t[2];return 0>i&&(e+=i/2,i=0),0>u&&(r+=u/2,u=0),{x:e,y:r,dx:i,dy:u}}function Yi(n){var t=n[0],e=n[n.length-1];return e>t?[t,e]:[e,t]}function Zi(n){return n.rangeExtent?n.rangeExtent():Yi(n.range())}function Vi(n,t,e,r){var i=e(n[0],n[1]),u=r(t[0],t[1]);return function(n){return u(i(n))}}function Xi(n,t){var e,r=0,i=n.length-1,u=n[r],o=n[i];return u>o&&(e=r,r=i,i=e,e=u,u=o,o=e),n[r]=t.floor(u),n[i]=t.ceil(o),n}function $i(n){return n?{floor:function(t){return Math.floor(t/n)*n},ceil:function(t){return Math.ceil(t/n)*n}}:Sl}function Bi(n,t,e,r){var 
i=[],u=[],o=0,a=Math.min(n.length,t.length)-1;for(n[a]<n[0]&&(n=n.slice().reverse(),t=t.slice().reverse());++o<=a;)i.push(e(n[o-1],n[o])),u.push(r(t[o-1],t[o]));return function(t){var e=ao.bisect(n,t,1,a)-1;return u[e](i[e](t))}}function Wi(n,t,e,r){function i(){var i=Math.min(n.length,t.length)>2?Bi:Vi,l=r?Wr:Br;return o=i(n,t,l,e),a=i(t,n,l,Mr),u}function u(n){return o(n)}var o,a;return u.invert=function(n){return a(n)},u.domain=function(t){return arguments.length?(n=t.map(Number),i()):n},u.range=function(n){return arguments.length?(t=n,i()):t},u.rangeRound=function(n){return u.range(n).interpolate(Ur)},u.clamp=function(n){return arguments.length?(r=n,i()):r},u.interpolate=function(n){return arguments.length?(e=n,i()):e},u.ticks=function(t){return Qi(n,t)},u.tickFormat=function(t,e){return nu(n,t,e)},u.nice=function(t){return Gi(n,t),i()},u.copy=function(){return Wi(n,t,e,r)},i()}function Ji(n,t){return ao.rebind(n,t,"range","rangeRound","interpolate","clamp")}function Gi(n,t){return Xi(n,$i(Ki(n,t)[2])),Xi(n,$i(Ki(n,t)[2])),n}function Ki(n,t){null==t&&(t=10);var e=Yi(n),r=e[1]-e[0],i=Math.pow(10,Math.floor(Math.log(r/t)/Math.LN10)),u=t/r*i;return.15>=u?i*=10:.35>=u?i*=5:.75>=u&&(i*=2),e[0]=Math.ceil(e[0]/i)*i,e[1]=Math.floor(e[1]/i)*i+.5*i,e[2]=i,e}function Qi(n,t){return ao.range.apply(ao,Ki(n,t))}function nu(n,t,e){var r=Ki(n,t);if(e){var i=ha.exec(e);if(i.shift(),"s"===i[8]){var u=ao.formatPrefix(Math.max(xo(r[0]),xo(r[1])));return i[7]||(i[7]="."+tu(u.scale(r[2]))),i[8]="f",e=ao.format(i.join("")),function(n){return e(u.scale(n))+u.symbol}}i[7]||(i[7]="."+eu(i[8],r)),e=i.join("")}else e=",."+tu(r[2])+"f";return ao.format(e)}function tu(n){return-Math.floor(Math.log(n)/Math.LN10+.01)}function eu(n,t){var e=tu(t[2]);return n in kl?Math.abs(e-tu(Math.max(xo(t[0]),xo(t[1]))))+ +("e"!==n):e-2*("%"===n)}function ru(n,t,e,r){function i(n){return(e?Math.log(0>n?0:n):-Math.log(n>0?0:-n))/Math.log(t)}function u(n){return e?Math.pow(t,n):-Math.pow(t,-n)}function o(t){return n(i(t))}return o.invert=function(t){return u(n.invert(t))},o.domain=function(t){return arguments.length?(e=t[0]>=0,n.domain((r=t.map(Number)).map(i)),o):r},o.base=function(e){return arguments.length?(t=+e,n.domain(r.map(i)),o):t},o.nice=function(){var t=Xi(r.map(i),e?Math:El);return n.domain(t),r=t.map(u),o},o.ticks=function(){var n=Yi(r),o=[],a=n[0],l=n[1],c=Math.floor(i(a)),f=Math.ceil(i(l)),s=t%1?2:t;if(isFinite(f-c)){if(e){for(;f>c;c++)for(var h=1;s>h;h++)o.push(u(c)*h);o.push(u(c))}else for(o.push(u(c));c++<f;)for(var h=s-1;h>0;h--)o.push(u(c)*h);for(c=0;o[c]<a;c++);for(f=o.length;o[f-1]>l;f--);o=o.slice(c,f)}return o},o.tickFormat=function(n,e){if(!arguments.length)return Nl;arguments.length<2?e=Nl:"function"!=typeof e&&(e=ao.format(e));var r=Math.max(1,t*n/o.ticks().length);return function(n){var o=n/u(Math.round(i(n)));return t-.5>o*t&&(o*=t),r>=o?e(n):""}},o.copy=function(){return ru(n.copy(),t,e,r)},Ji(o,n)}function iu(n,t,e){function r(t){return n(i(t))}var i=uu(t),u=uu(1/t);return r.invert=function(t){return u(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain((e=t.map(Number)).map(i)),r):e},r.ticks=function(n){return Qi(e,n)},r.tickFormat=function(n,t){return nu(e,n,t)},r.nice=function(n){return r.domain(Gi(e,n))},r.exponent=function(o){return arguments.length?(i=uu(t=o),u=uu(1/t),n.domain(e.map(i)),r):t},r.copy=function(){return iu(n.copy(),t,e)},Ji(r,n)}function uu(n){return function(t){return 0>t?-Math.pow(-t,n):Math.pow(t,n)}}function ou(n,t){function e(e){return 
u[((i.get(e)||("range"===t.t?i.set(e,n.push(e)):NaN))-1)%u.length]}function r(t,e){return ao.range(n.length).map(function(n){return t+e*n})}var i,u,o;return e.domain=function(r){if(!arguments.length)return n;n=[],i=new c;for(var u,o=-1,a=r.length;++o<a;)i.has(u=r[o])||i.set(u,n.push(u));return e[t.t].apply(e,t.a)},e.range=function(n){return arguments.length?(u=n,o=0,t={t:"range",a:arguments},e):u},e.rangePoints=function(i,a){arguments.length<2&&(a=0);var l=i[0],c=i[1],f=n.length<2?(l=(l+c)/2,0):(c-l)/(n.length-1+a);return u=r(l+f*a/2,f),o=0,t={t:"rangePoints",a:arguments},e},e.rangeRoundPoints=function(i,a){arguments.length<2&&(a=0);var l=i[0],c=i[1],f=n.length<2?(l=c=Math.round((l+c)/2),0):(c-l)/(n.length-1+a)|0;return u=r(l+Math.round(f*a/2+(c-l-(n.length-1+a)*f)/2),f),o=0,t={t:"rangeRoundPoints",a:arguments},e},e.rangeBands=function(i,a,l){arguments.length<2&&(a=0),arguments.length<3&&(l=a);var c=i[1]<i[0],f=i[c-0],s=i[1-c],h=(s-f)/(n.length-a+2*l);return u=r(f+h*l,h),c&&u.reverse(),o=h*(1-a),t={t:"rangeBands",a:arguments},e},e.rangeRoundBands=function(i,a,l){arguments.length<2&&(a=0),arguments.length<3&&(l=a);var c=i[1]<i[0],f=i[c-0],s=i[1-c],h=Math.floor((s-f)/(n.length-a+2*l));return u=r(f+Math.round((s-f-(n.length-a)*h)/2),h),c&&u.reverse(),o=Math.round(h*(1-a)),t={t:"rangeRoundBands",a:arguments},e},e.rangeBand=function(){return o},e.rangeExtent=function(){return Yi(t.a[0])},e.copy=function(){return ou(n,t)},e.domain(n)}function au(n,t){function u(){var e=0,r=t.length;for(a=[];++e<r;)a[e-1]=ao.quantile(n,e/r);return o}function o(n){return isNaN(n=+n)?void 0:t[ao.bisect(a,n)]}var a;return o.domain=function(t){return arguments.length?(n=t.map(r).filter(i).sort(e),u()):n},o.range=function(n){return arguments.length?(t=n,u()):t},o.quantiles=function(){return a},o.invertExtent=function(e){return e=t.indexOf(e),0>e?[NaN,NaN]:[e>0?a[e-1]:n[0],e<a.length?a[e]:n[n.length-1]]},o.copy=function(){return au(n,t)},u()}function lu(n,t,e){function r(t){return e[Math.max(0,Math.min(o,Math.floor(u*(t-n))))]}function i(){return u=e.length/(t-n),o=e.length-1,r}var u,o;return r.domain=function(e){return arguments.length?(n=+e[0],t=+e[e.length-1],i()):[n,t]},r.range=function(n){return arguments.length?(e=n,i()):e},r.invertExtent=function(t){return t=e.indexOf(t),t=0>t?NaN:t/u+n,[t,t+1/u]},r.copy=function(){return lu(n,t,e)},i()}function cu(n,t){function e(e){return e>=e?t[ao.bisect(n,e)]:void 0}return e.domain=function(t){return arguments.length?(n=t,e):n},e.range=function(n){return arguments.length?(t=n,e):t},e.invertExtent=function(e){return e=t.indexOf(e),[n[e-1],n[e]]},e.copy=function(){return cu(n,t)},e}function fu(n){function t(n){return+n}return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=e.map(t),t):n},t.ticks=function(t){return Qi(n,t)},t.tickFormat=function(t,e){return nu(n,t,e)},t.copy=function(){return fu(n)},t}function su(){return 0}function hu(n){return n.innerRadius}function pu(n){return n.outerRadius}function gu(n){return n.startAngle}function vu(n){return n.endAngle}function du(n){return n&&n.padAngle}function yu(n,t,e,r){return(n-e)*t-(t-r)*n>0?0:1}function mu(n,t,e,r,i){var u=n[0]-t[0],o=n[1]-t[1],a=(i?r:-r)/Math.sqrt(u*u+o*o),l=a*o,c=-a*u,f=n[0]+l,s=n[1]+c,h=t[0]+l,p=t[1]+c,g=(f+h)/2,v=(s+p)/2,d=h-f,y=p-s,m=d*d+y*y,M=e-r,x=f*p-h*s,b=(0>y?-1:1)*Math.sqrt(Math.max(0,M*M*m-x*x)),_=(x*y-d*b)/m,w=(-x*d-y*b)/m,S=(x*y+d*b)/m,k=(-x*d+y*b)/m,N=_-g,E=w-v,A=S-g,C=k-v;return N*N+E*E>A*A+C*C&&(_=S,w=k),[[_-l,w-c],[_*e/M,w*e/M]]}function Mu(n){function t(t){function 
o(){c.push("M",u(n(f),a))}for(var l,c=[],f=[],s=-1,h=t.length,p=En(e),g=En(r);++s<h;)i.call(this,l=t[s],s)?f.push([+p.call(this,l,s),+g.call(this,l,s)]):f.length&&(o(),f=[]);return f.length&&o(),c.length?c.join(""):null}var e=Ce,r=ze,i=zt,u=xu,o=u.key,a=.7;return t.x=function(n){return arguments.length?(e=n,t):e},t.y=function(n){return arguments.length?(r=n,t):r},t.defined=function(n){return arguments.length?(i=n,t):i},t.interpolate=function(n){return arguments.length?(o="function"==typeof n?u=n:(u=Tl.get(n)||xu).key,t):o},t.tension=function(n){return arguments.length?(a=n,t):a},t}function xu(n){return n.length>1?n.join("L"):n+"Z"}function bu(n){return n.join("L")+"Z"}function _u(n){for(var t=0,e=n.length,r=n[0],i=[r[0],",",r[1]];++t<e;)i.push("H",(r[0]+(r=n[t])[0])/2,"V",r[1]);return e>1&&i.push("H",r[0]),i.join("")}function wu(n){for(var t=0,e=n.length,r=n[0],i=[r[0],",",r[1]];++t<e;)i.push("V",(r=n[t])[1],"H",r[0]);return i.join("")}function Su(n){for(var t=0,e=n.length,r=n[0],i=[r[0],",",r[1]];++t<e;)i.push("H",(r=n[t])[0],"V",r[1]);return i.join("")}function ku(n,t){return n.length<4?xu(n):n[1]+Au(n.slice(1,-1),Cu(n,t))}function Nu(n,t){return n.length<3?bu(n):n[0]+Au((n.push(n[0]),n),Cu([n[n.length-2]].concat(n,[n[1]]),t))}function Eu(n,t){return n.length<3?xu(n):n[0]+Au(n,Cu(n,t))}function Au(n,t){if(t.length<1||n.length!=t.length&&n.length!=t.length+2)return xu(n);var e=n.length!=t.length,r="",i=n[0],u=n[1],o=t[0],a=o,l=1;if(e&&(r+="Q"+(u[0]-2*o[0]/3)+","+(u[1]-2*o[1]/3)+","+u[0]+","+u[1],i=n[1],l=2),t.length>1){a=t[1],u=n[l],l++,r+="C"+(i[0]+o[0])+","+(i[1]+o[1])+","+(u[0]-a[0])+","+(u[1]-a[1])+","+u[0]+","+u[1];for(var c=2;c<t.length;c++,l++)u=n[l],a=t[c],r+="S"+(u[0]-a[0])+","+(u[1]-a[1])+","+u[0]+","+u[1]}if(e){var f=n[l];r+="Q"+(u[0]+2*a[0]/3)+","+(u[1]+2*a[1]/3)+","+f[0]+","+f[1]}return r}function Cu(n,t){for(var e,r=[],i=(1-t)/2,u=n[0],o=n[1],a=1,l=n.length;++a<l;)e=u,u=o,o=n[a],r.push([i*(o[0]-e[0]),i*(o[1]-e[1])]);return r}function zu(n){if(n.length<3)return xu(n);var t=1,e=n.length,r=n[0],i=r[0],u=r[1],o=[i,i,i,(r=n[1])[0]],a=[u,u,u,r[1]],l=[i,",",u,"L",Ru(Pl,o),",",Ru(Pl,a)];for(n.push(n[e-1]);++t<=e;)r=n[t],o.shift(),o.push(r[0]),a.shift(),a.push(r[1]),Du(l,o,a);return n.pop(),l.push("L",r),l.join("")}function Lu(n){if(n.length<4)return xu(n);for(var t,e=[],r=-1,i=n.length,u=[0],o=[0];++r<3;)t=n[r],u.push(t[0]),o.push(t[1]);for(e.push(Ru(Pl,u)+","+Ru(Pl,o)),--r;++r<i;)t=n[r],u.shift(),u.push(t[0]),o.shift(),o.push(t[1]),Du(e,u,o);return e.join("")}function qu(n){for(var t,e,r=-1,i=n.length,u=i+4,o=[],a=[];++r<4;)e=n[r%i],o.push(e[0]),a.push(e[1]);for(t=[Ru(Pl,o),",",Ru(Pl,a)],--r;++r<u;)e=n[r%i],o.shift(),o.push(e[0]),a.shift(),a.push(e[1]),Du(t,o,a);return t.join("")}function Tu(n,t){var e=n.length-1;if(e)for(var r,i,u=n[0][0],o=n[0][1],a=n[e][0]-u,l=n[e][1]-o,c=-1;++c<=e;)r=n[c],i=c/e,r[0]=t*r[0]+(1-t)*(u+i*a),r[1]=t*r[1]+(1-t)*(o+i*l);return zu(n)}function Ru(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]+n[3]*t[3]}function Du(n,t,e){n.push("C",Ru(Rl,t),",",Ru(Rl,e),",",Ru(Dl,t),",",Ru(Dl,e),",",Ru(Pl,t),",",Ru(Pl,e))}function Pu(n,t){return(t[1]-n[1])/(t[0]-n[0])}function Uu(n){for(var t=0,e=n.length-1,r=[],i=n[0],u=n[1],o=r[0]=Pu(i,u);++t<e;)r[t]=(o+(o=Pu(i=u,u=n[t+1])))/2;return r[t]=o,r}function ju(n){for(var 
t,e,r,i,u=[],o=Uu(n),a=-1,l=n.length-1;++a<l;)t=Pu(n[a],n[a+1]),xo(t)<Uo?o[a]=o[a+1]=0:(e=o[a]/t,r=o[a+1]/t,i=e*e+r*r,i>9&&(i=3*t/Math.sqrt(i),o[a]=i*e,o[a+1]=i*r));for(a=-1;++a<=l;)i=(n[Math.min(l,a+1)][0]-n[Math.max(0,a-1)][0])/(6*(1+o[a]*o[a])),u.push([i||0,o[a]*i||0]);return u}function Fu(n){return n.length<3?xu(n):n[0]+Au(n,ju(n))}function Hu(n){for(var t,e,r,i=-1,u=n.length;++i<u;)t=n[i],e=t[0],r=t[1]-Io,t[0]=e*Math.cos(r),t[1]=e*Math.sin(r);return n}function Ou(n){function t(t){function l(){v.push("M",a(n(y),s),f,c(n(d.reverse()),s),"Z")}for(var h,p,g,v=[],d=[],y=[],m=-1,M=t.length,x=En(e),b=En(i),_=e===r?function(){
+return p}:En(r),w=i===u?function(){return g}:En(u);++m<M;)o.call(this,h=t[m],m)?(d.push([p=+x.call(this,h,m),g=+b.call(this,h,m)]),y.push([+_.call(this,h,m),+w.call(this,h,m)])):d.length&&(l(),d=[],y=[]);return d.length&&l(),v.length?v.join(""):null}var e=Ce,r=Ce,i=0,u=ze,o=zt,a=xu,l=a.key,c=a,f="L",s=.7;return t.x=function(n){return arguments.length?(e=r=n,t):r},t.x0=function(n){return arguments.length?(e=n,t):e},t.x1=function(n){return arguments.length?(r=n,t):r},t.y=function(n){return arguments.length?(i=u=n,t):u},t.y0=function(n){return arguments.length?(i=n,t):i},t.y1=function(n){return arguments.length?(u=n,t):u},t.defined=function(n){return arguments.length?(o=n,t):o},t.interpolate=function(n){return arguments.length?(l="function"==typeof n?a=n:(a=Tl.get(n)||xu).key,c=a.reverse||a,f=a.closed?"M":"L",t):l},t.tension=function(n){return arguments.length?(s=n,t):s},t}function Iu(n){return n.radius}function Yu(n){return[n.x,n.y]}function Zu(n){return function(){var t=n.apply(this,arguments),e=t[0],r=t[1]-Io;return[e*Math.cos(r),e*Math.sin(r)]}}function Vu(){return 64}function Xu(){return"circle"}function $u(n){var t=Math.sqrt(n/Fo);return"M0,"+t+"A"+t+","+t+" 0 1,1 0,"+-t+"A"+t+","+t+" 0 1,1 0,"+t+"Z"}function Bu(n){return function(){var t,e,r;(t=this[n])&&(r=t[e=t.active])&&(r.timer.c=null,r.timer.t=NaN,--t.count?delete t[e]:delete this[n],t.active+=.5,r.event&&r.event.interrupt.call(this,this.__data__,r.index))}}function Wu(n,t,e){return ko(n,Yl),n.namespace=t,n.id=e,n}function Ju(n,t,e,r){var i=n.id,u=n.namespace;return Y(n,"function"==typeof e?function(n,o,a){n[u][i].tween.set(t,r(e.call(n,n.__data__,o,a)))}:(e=r(e),function(n){n[u][i].tween.set(t,e)}))}function Gu(n){return null==n&&(n=""),function(){this.textContent=n}}function Ku(n){return null==n?"__transition__":"__transition_"+n+"__"}function Qu(n,t,e,r,i){function u(n){var t=v.delay;return f.t=t+l,n>=t?o(n-t):void(f.c=o)}function o(e){var i=g.active,u=g[i];u&&(u.timer.c=null,u.timer.t=NaN,--g.count,delete g[i],u.event&&u.event.interrupt.call(n,n.__data__,u.index));for(var o in g)if(r>+o){var c=g[o];c.timer.c=null,c.timer.t=NaN,--g.count,delete g[o]}f.c=a,qn(function(){return f.c&&a(e||1)&&(f.c=null,f.t=NaN),1},0,l),g.active=r,v.event&&v.event.start.call(n,n.__data__,t),p=[],v.tween.forEach(function(e,r){(r=r.call(n,n.__data__,t))&&p.push(r)}),h=v.ease,s=v.duration}function a(i){for(var u=i/s,o=h(u),a=p.length;a>0;)p[--a].call(n,o);return u>=1?(v.event&&v.event.end.call(n,n.__data__,t),--g.count?delete g[r]:delete n[e],1):void 0}var l,f,s,h,p,g=n[e]||(n[e]={active:0,count:0}),v=g[r];v||(l=i.time,f=qn(u,0,l),v=g[r]={tween:new c,time:l,timer:f,delay:i.delay,duration:i.duration,ease:i.ease,index:t},i=null,++g.count)}function no(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate("+(isFinite(r)?r:e(n))+",0)"})}function to(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate(0,"+(isFinite(r)?r:e(n))+")"})}function eo(n){return n.toISOString()}function ro(n,t,e){function r(t){return n(t)}function i(n,e){var r=n[1]-n[0],i=r/e,u=ao.bisect(Kl,i);return u==Kl.length?[t.year,Ki(n.map(function(n){return n/31536e6}),e)[2]]:u?t[i/Kl[u-1]<Kl[u]/i?u-1:u]:[tc,Ki(n,e)[2]]}return r.invert=function(t){return io(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain(t),r):n.domain().map(io)},r.nice=function(n,t){function e(e){return!isNaN(e)&&!n.range(e,io(+e+1),t).length}var u=r.domain(),o=Yi(u),a=null==n?i(o,10):"number"==typeof n&&i(o,n);return 
a&&(n=a[0],t=a[1]),r.domain(Xi(u,t>1?{floor:function(t){for(;e(t=n.floor(t));)t=io(t-1);return t},ceil:function(t){for(;e(t=n.ceil(t));)t=io(+t+1);return t}}:n))},r.ticks=function(n,t){var e=Yi(r.domain()),u=null==n?i(e,10):"number"==typeof n?i(e,n):!n.range&&[{range:n},t];return u&&(n=u[0],t=u[1]),n.range(e[0],io(+e[1]+1),1>t?1:t)},r.tickFormat=function(){return e},r.copy=function(){return ro(n.copy(),t,e)},Ji(r,n)}function io(n){return new Date(n)}function uo(n){return JSON.parse(n.responseText)}function oo(n){var t=fo.createRange();return t.selectNode(fo.body),t.createContextualFragment(n.responseText)}var ao={version:"3.5.17"},lo=[].slice,co=function(n){return lo.call(n)},fo=this.document;if(fo)try{co(fo.documentElement.childNodes)[0].nodeType}catch(so){co=function(n){for(var t=n.length,e=new Array(t);t--;)e[t]=n[t];return e}}if(Date.now||(Date.now=function(){return+new Date}),fo)try{fo.createElement("DIV").style.setProperty("opacity",0,"")}catch(ho){var po=this.Element.prototype,go=po.setAttribute,vo=po.setAttributeNS,yo=this.CSSStyleDeclaration.prototype,mo=yo.setProperty;po.setAttribute=function(n,t){go.call(this,n,t+"")},po.setAttributeNS=function(n,t,e){vo.call(this,n,t,e+"")},yo.setProperty=function(n,t,e){mo.call(this,n,t+"",e)}}ao.ascending=e,ao.descending=function(n,t){return n>t?-1:t>n?1:t>=n?0:NaN},ao.min=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i<u;)if(null!=(r=n[i])&&r>=r){e=r;break}for(;++i<u;)null!=(r=n[i])&&e>r&&(e=r)}else{for(;++i<u;)if(null!=(r=t.call(n,n[i],i))&&r>=r){e=r;break}for(;++i<u;)null!=(r=t.call(n,n[i],i))&&e>r&&(e=r)}return e},ao.max=function(n,t){var e,r,i=-1,u=n.length;if(1===arguments.length){for(;++i<u;)if(null!=(r=n[i])&&r>=r){e=r;break}for(;++i<u;)null!=(r=n[i])&&r>e&&(e=r)}else{for(;++i<u;)if(null!=(r=t.call(n,n[i],i))&&r>=r){e=r;break}for(;++i<u;)null!=(r=t.call(n,n[i],i))&&r>e&&(e=r)}return e},ao.extent=function(n,t){var e,r,i,u=-1,o=n.length;if(1===arguments.length){for(;++u<o;)if(null!=(r=n[u])&&r>=r){e=i=r;break}for(;++u<o;)null!=(r=n[u])&&(e>r&&(e=r),r>i&&(i=r))}else{for(;++u<o;)if(null!=(r=t.call(n,n[u],u))&&r>=r){e=i=r;break}for(;++u<o;)null!=(r=t.call(n,n[u],u))&&(e>r&&(e=r),r>i&&(i=r))}return[e,i]},ao.sum=function(n,t){var e,r=0,u=n.length,o=-1;if(1===arguments.length)for(;++o<u;)i(e=+n[o])&&(r+=e);else for(;++o<u;)i(e=+t.call(n,n[o],o))&&(r+=e);return r},ao.mean=function(n,t){var e,u=0,o=n.length,a=-1,l=o;if(1===arguments.length)for(;++a<o;)i(e=r(n[a]))?u+=e:--l;else for(;++a<o;)i(e=r(t.call(n,n[a],a)))?u+=e:--l;return l?u/l:void 0},ao.quantile=function(n,t){var e=(n.length-1)*t+1,r=Math.floor(e),i=+n[r-1],u=e-r;return u?i+u*(n[r]-i):i},ao.median=function(n,t){var u,o=[],a=n.length,l=-1;if(1===arguments.length)for(;++l<a;)i(u=r(n[l]))&&o.push(u);else for(;++l<a;)i(u=r(t.call(n,n[l],l)))&&o.push(u);return o.length?ao.quantile(o.sort(e),.5):void 0},ao.variance=function(n,t){var e,u,o=n.length,a=0,l=0,c=-1,f=0;if(1===arguments.length)for(;++c<o;)i(e=r(n[c]))&&(u=e-a,a+=u/++f,l+=u*(e-a));else for(;++c<o;)i(e=r(t.call(n,n[c],c)))&&(u=e-a,a+=u/++f,l+=u*(e-a));return f>1?l/(f-1):void 0},ao.deviation=function(){var n=ao.variance.apply(this,arguments);return n?Math.sqrt(n):n};var Mo=u(e);ao.bisectLeft=Mo.left,ao.bisect=ao.bisectRight=Mo.right,ao.bisector=function(n){return u(1===n.length?function(t,r){return e(n(t),r)}:n)},ao.shuffle=function(n,t,e){(u=arguments.length)<3&&(e=n.length,2>u&&(t=0));for(var r,i,u=e-t;u;)i=Math.random()*u--|0,r=n[u+t],n[u+t]=n[i+t],n[i+t]=r;return n},ao.permute=function(n,t){for(var 
e=t.length,r=new Array(e);e--;)r[e]=n[t[e]];return r},ao.pairs=function(n){for(var t,e=0,r=n.length-1,i=n[0],u=new Array(0>r?0:r);r>e;)u[e]=[t=i,i=n[++e]];return u},ao.transpose=function(n){if(!(i=n.length))return[];for(var t=-1,e=ao.min(n,o),r=new Array(e);++t<e;)for(var i,u=-1,a=r[t]=new Array(i);++u<i;)a[u]=n[u][t];return r},ao.zip=function(){return ao.transpose(arguments)},ao.keys=function(n){var t=[];for(var e in n)t.push(e);return t},ao.values=function(n){var t=[];for(var e in n)t.push(n[e]);return t},ao.entries=function(n){var t=[];for(var e in n)t.push({key:e,value:n[e]});return t},ao.merge=function(n){for(var t,e,r,i=n.length,u=-1,o=0;++u<i;)o+=n[u].length;for(e=new Array(o);--i>=0;)for(r=n[i],t=r.length;--t>=0;)e[--o]=r[t];return e};var xo=Math.abs;ao.range=function(n,t,e){if(arguments.length<3&&(e=1,arguments.length<2&&(t=n,n=0)),(t-n)/e===1/0)throw new Error("infinite range");var r,i=[],u=a(xo(e)),o=-1;if(n*=u,t*=u,e*=u,0>e)for(;(r=n+e*++o)>t;)i.push(r/u);else for(;(r=n+e*++o)<t;)i.push(r/u);return i},ao.map=function(n,t){var e=new c;if(n instanceof c)n.forEach(function(n,t){e.set(n,t)});else if(Array.isArray(n)){var r,i=-1,u=n.length;if(1===arguments.length)for(;++i<u;)e.set(i,n[i]);else for(;++i<u;)e.set(t.call(n,r=n[i],i),r)}else for(var o in n)e.set(o,n[o]);return e};var bo="__proto__",_o="\x00";l(c,{has:h,get:function(n){return this._[f(n)]},set:function(n,t){return this._[f(n)]=t},remove:p,keys:g,values:function(){var n=[];for(var t in this._)n.push(this._[t]);return n},entries:function(){var n=[];for(var t in this._)n.push({key:s(t),value:this._[t]});return n},size:v,empty:d,forEach:function(n){for(var t in this._)n.call(this,s(t),this._[t])}}),ao.nest=function(){function n(t,o,a){if(a>=u.length)return r?r.call(i,o):e?o.sort(e):o;for(var l,f,s,h,p=-1,g=o.length,v=u[a++],d=new c;++p<g;)(h=d.get(l=v(f=o[p])))?h.push(f):d.set(l,[f]);return t?(f=t(),s=function(e,r){f.set(e,n(t,r,a))}):(f={},s=function(e,r){f[e]=n(t,r,a)}),d.forEach(s),f}function t(n,e){if(e>=u.length)return n;var r=[],i=o[e++];return n.forEach(function(n,i){r.push({key:n,values:t(i,e)})}),i?r.sort(function(n,t){return i(n.key,t.key)}):r}var e,r,i={},u=[],o=[];return i.map=function(t,e){return n(e,t,0)},i.entries=function(e){return t(n(ao.map,e,0),0)},i.key=function(n){return u.push(n),i},i.sortKeys=function(n){return o[u.length-1]=n,i},i.sortValues=function(n){return e=n,i},i.rollup=function(n){return r=n,i},i},ao.set=function(n){var t=new y;if(n)for(var e=0,r=n.length;r>e;++e)t.add(n[e]);return t},l(y,{has:h,add:function(n){return this._[f(n+="")]=!0,n},remove:p,values:g,size:v,empty:d,forEach:function(n){for(var t in this._)n.call(this,s(t))}}),ao.behavior={},ao.rebind=function(n,t){for(var e,r=1,i=arguments.length;++r<i;)n[e=arguments[r]]=M(n,t,t[e]);return n};var wo=["webkit","ms","moz","Moz","o","O"];ao.dispatch=function(){for(var n=new _,t=-1,e=arguments.length;++t<e;)n[arguments[t]]=w(n);return n},_.prototype.on=function(n,t){var e=n.indexOf("."),r="";if(e>=0&&(r=n.slice(e+1),n=n.slice(0,e)),n)return arguments.length<2?this[n].on(r):this[n].on(r,t);if(2===arguments.length){if(null==t)for(n in this)this.hasOwnProperty(n)&&this[n].on(r,null);return this}},ao.event=null,ao.requote=function(n){return n.replace(So,"\\$&")};var So=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g,ko={}.__proto__?function(n,t){n.__proto__=t}:function(n,t){for(var e in t)n[e]=t[e]},No=function(n,t){return t.querySelector(n)},Eo=function(n,t){return t.querySelectorAll(n)},Ao=function(n,t){var 
e=n.matches||n[x(n,"matchesSelector")];return(Ao=function(n,t){return e.call(n,t)})(n,t)};"function"==typeof Sizzle&&(No=function(n,t){return Sizzle(n,t)[0]||null},Eo=Sizzle,Ao=Sizzle.matchesSelector),ao.selection=function(){return ao.select(fo.documentElement)};var Co=ao.selection.prototype=[];Co.select=function(n){var t,e,r,i,u=[];n=A(n);for(var o=-1,a=this.length;++o<a;){u.push(t=[]),t.parentNode=(r=this[o]).parentNode;for(var l=-1,c=r.length;++l<c;)(i=r[l])?(t.push(e=n.call(i,i.__data__,l,o)),e&&"__data__"in i&&(e.__data__=i.__data__)):t.push(null)}return E(u)},Co.selectAll=function(n){var t,e,r=[];n=C(n);for(var i=-1,u=this.length;++i<u;)for(var o=this[i],a=-1,l=o.length;++a<l;)(e=o[a])&&(r.push(t=co(n.call(e,e.__data__,a,i))),t.parentNode=e);return E(r)};var zo="http://www.w3.org/1999/xhtml",Lo={svg:"http://www.w3.org/2000/svg",xhtml:zo,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};ao.ns={prefix:Lo,qualify:function(n){var t=n.indexOf(":"),e=n;return t>=0&&"xmlns"!==(e=n.slice(0,t))&&(n=n.slice(t+1)),Lo.hasOwnProperty(e)?{space:Lo[e],local:n}:n}},Co.attr=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node();return n=ao.ns.qualify(n),n.local?e.getAttributeNS(n.space,n.local):e.getAttribute(n)}for(t in n)this.each(z(t,n[t]));return this}return this.each(z(n,t))},Co.classed=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node(),r=(n=T(n)).length,i=-1;if(t=e.classList){for(;++i<r;)if(!t.contains(n[i]))return!1}else for(t=e.getAttribute("class");++i<r;)if(!q(n[i]).test(t))return!1;return!0}for(t in n)this.each(R(t,n[t]));return this}return this.each(R(n,t))},Co.style=function(n,e,r){var i=arguments.length;if(3>i){if("string"!=typeof n){2>i&&(e="");for(r in n)this.each(P(r,n[r],e));return this}if(2>i){var u=this.node();return t(u).getComputedStyle(u,null).getPropertyValue(n)}r=""}return this.each(P(n,e,r))},Co.property=function(n,t){if(arguments.length<2){if("string"==typeof n)return this.node()[n];for(t in n)this.each(U(t,n[t]));return this}return this.each(U(n,t))},Co.text=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.textContent=null==t?"":t}:null==n?function(){this.textContent=""}:function(){this.textContent=n}):this.node().textContent},Co.html=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.innerHTML=null==t?"":t}:null==n?function(){this.innerHTML=""}:function(){this.innerHTML=n}):this.node().innerHTML},Co.append=function(n){return n=j(n),this.select(function(){return this.appendChild(n.apply(this,arguments))})},Co.insert=function(n,t){return n=j(n),t=A(t),this.select(function(){return this.insertBefore(n.apply(this,arguments),t.apply(this,arguments)||null)})},Co.remove=function(){return this.each(F)},Co.data=function(n,t){function e(n,e){var r,i,u,o=n.length,s=e.length,h=Math.min(o,s),p=new Array(s),g=new Array(s),v=new Array(o);if(t){var d,y=new c,m=new Array(o);for(r=-1;++r<o;)(i=n[r])&&(y.has(d=t.call(i,i.__data__,r))?v[r]=i:y.set(d,i),m[r]=d);for(r=-1;++r<s;)(i=y.get(d=t.call(e,u=e[r],r)))?i!==!0&&(p[r]=i,i.__data__=u):g[r]=H(u),y.set(d,!0);for(r=-1;++r<o;)r in m&&y.get(m[r])!==!0&&(v[r]=n[r])}else{for(r=-1;++r<h;)i=n[r],u=e[r],i?(i.__data__=u,p[r]=i):g[r]=H(u);for(;s>r;++r)g[r]=H(e[r]);for(;o>r;++r)v[r]=n[r]}g.update=p,g.parentNode=p.parentNode=v.parentNode=n.parentNode,a.push(g),l.push(p),f.push(v)}var 
r,i,u=-1,o=this.length;if(!arguments.length){for(n=new Array(o=(r=this[0]).length);++u<o;)(i=r[u])&&(n[u]=i.__data__);return n}var a=Z([]),l=E([]),f=E([]);if("function"==typeof n)for(;++u<o;)e(r=this[u],n.call(r,r.parentNode.__data__,u));else for(;++u<o;)e(r=this[u],n);return l.enter=function(){return a},l.exit=function(){return f},l},Co.datum=function(n){return arguments.length?this.property("__data__",n):this.property("__data__")},Co.filter=function(n){var t,e,r,i=[];"function"!=typeof n&&(n=O(n));for(var u=0,o=this.length;o>u;u++){i.push(t=[]),t.parentNode=(e=this[u]).parentNode;for(var a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return E(i)},Co.order=function(){for(var n=-1,t=this.length;++n<t;)for(var e,r=this[n],i=r.length-1,u=r[i];--i>=0;)(e=r[i])&&(u&&u!==e.nextSibling&&u.parentNode.insertBefore(e,u),u=e);return this},Co.sort=function(n){n=I.apply(this,arguments);for(var t=-1,e=this.length;++t<e;)this[t].sort(n);return this.order()},Co.each=function(n){return Y(this,function(t,e,r){n.call(t,t.__data__,e,r)})},Co.call=function(n){var t=co(arguments);return n.apply(t[0]=this,t),this},Co.empty=function(){return!this.node()},Co.node=function(){for(var n=0,t=this.length;t>n;n++)for(var e=this[n],r=0,i=e.length;i>r;r++){var u=e[r];if(u)return u}return null},Co.size=function(){var n=0;return Y(this,function(){++n}),n};var qo=[];ao.selection.enter=Z,ao.selection.enter.prototype=qo,qo.append=Co.append,qo.empty=Co.empty,qo.node=Co.node,qo.call=Co.call,qo.size=Co.size,qo.select=function(n){for(var t,e,r,i,u,o=[],a=-1,l=this.length;++a<l;){r=(i=this[a]).update,o.push(t=[]),t.parentNode=i.parentNode;for(var c=-1,f=i.length;++c<f;)(u=i[c])?(t.push(r[c]=e=n.call(i.parentNode,u.__data__,c,a)),e.__data__=u.__data__):t.push(null)}return E(o)},qo.insert=function(n,t){return arguments.length<2&&(t=V(this)),Co.insert.call(this,n,t)},ao.select=function(t){var e;return"string"==typeof t?(e=[No(t,fo)],e.parentNode=fo.documentElement):(e=[t],e.parentNode=n(t)),E([e])},ao.selectAll=function(n){var t;return"string"==typeof n?(t=co(Eo(n,fo)),t.parentNode=fo.documentElement):(t=co(n),t.parentNode=null),E([t])},Co.on=function(n,t,e){var r=arguments.length;if(3>r){if("string"!=typeof n){2>r&&(t=!1);for(e in n)this.each(X(e,n[e],t));return this}if(2>r)return(r=this.node()["__on"+n])&&r._;e=!1}return this.each(X(n,t,e))};var To=ao.map({mouseenter:"mouseover",mouseleave:"mouseout"});fo&&To.forEach(function(n){"on"+n in fo&&To.remove(n)});var Ro,Do=0;ao.mouse=function(n){return J(n,k())};var Po=this.navigator&&/WebKit/.test(this.navigator.userAgent)?-1:0;ao.touch=function(n,t,e){if(arguments.length<3&&(e=t,t=k().changedTouches),t)for(var r,i=0,u=t.length;u>i;++i)if((r=t[i]).identifier===e)return J(n,r)},ao.behavior.drag=function(){function n(){this.on("mousedown.drag",u).on("touchstart.drag",o)}function e(n,t,e,u,o){return function(){function a(){var n,e,r=t(h,v);r&&(n=r[0]-M[0],e=r[1]-M[1],g|=n|e,M=r,p({type:"drag",x:r[0]+c[0],y:r[1]+c[1],dx:n,dy:e}))}function l(){t(h,v)&&(y.on(u+d,null).on(o+d,null),m(g),p({type:"dragend"}))}var c,f=this,s=ao.event.target.correspondingElement||ao.event.target,h=f.parentNode,p=r.of(f,arguments),g=0,v=n(),d=".drag"+(null==v?"":"-"+v),y=ao.select(e(s)).on(u+d,a).on(o+d,l),m=W(s),M=t(h,v);i?(c=i.apply(f,arguments),c=[c.x-M[0],c.y-M[1]]):c=[0,0],p({type:"dragstart"})}}var r=N(n,"drag","dragstart","dragend"),i=null,u=e(b,ao.mouse,t,"mousemove","mouseup"),o=e(G,ao.touch,m,"touchmove","touchend");return n.origin=function(t){return 
arguments.length?(i=t,n):i},ao.rebind(n,r,"on")},ao.touches=function(n,t){return arguments.length<2&&(t=k().touches),t?co(t).map(function(t){var e=J(n,t);return e.identifier=t.identifier,e}):[]};var Uo=1e-6,jo=Uo*Uo,Fo=Math.PI,Ho=2*Fo,Oo=Ho-Uo,Io=Fo/2,Yo=Fo/180,Zo=180/Fo,Vo=Math.SQRT2,Xo=2,$o=4;ao.interpolateZoom=function(n,t){var e,r,i=n[0],u=n[1],o=n[2],a=t[0],l=t[1],c=t[2],f=a-i,s=l-u,h=f*f+s*s;if(jo>h)r=Math.log(c/o)/Vo,e=function(n){return[i+n*f,u+n*s,o*Math.exp(Vo*n*r)]};else{var p=Math.sqrt(h),g=(c*c-o*o+$o*h)/(2*o*Xo*p),v=(c*c-o*o-$o*h)/(2*c*Xo*p),d=Math.log(Math.sqrt(g*g+1)-g),y=Math.log(Math.sqrt(v*v+1)-v);r=(y-d)/Vo,e=function(n){var t=n*r,e=rn(d),a=o/(Xo*p)*(e*un(Vo*t+d)-en(d));return[i+a*f,u+a*s,o*e/rn(Vo*t+d)]}}return e.duration=1e3*r,e},ao.behavior.zoom=function(){function n(n){n.on(L,s).on(Wo+".zoom",p).on("dblclick.zoom",g).on(R,h)}function e(n){return[(n[0]-k.x)/k.k,(n[1]-k.y)/k.k]}function r(n){return[n[0]*k.k+k.x,n[1]*k.k+k.y]}function i(n){k.k=Math.max(A[0],Math.min(A[1],n))}function u(n,t){t=r(t),k.x+=n[0]-t[0],k.y+=n[1]-t[1]}function o(t,e,r,o){t.__chart__={x:k.x,y:k.y,k:k.k},i(Math.pow(2,o)),u(d=e,r),t=ao.select(t),C>0&&(t=t.transition().duration(C)),t.call(n.event)}function a(){b&&b.domain(x.range().map(function(n){return(n-k.x)/k.k}).map(x.invert)),w&&w.domain(_.range().map(function(n){return(n-k.y)/k.k}).map(_.invert))}function l(n){z++||n({type:"zoomstart"})}function c(n){a(),n({type:"zoom",scale:k.k,translate:[k.x,k.y]})}function f(n){--z||(n({type:"zoomend"}),d=null)}function s(){function n(){a=1,u(ao.mouse(i),h),c(o)}function r(){s.on(q,null).on(T,null),p(a),f(o)}var i=this,o=D.of(i,arguments),a=0,s=ao.select(t(i)).on(q,n).on(T,r),h=e(ao.mouse(i)),p=W(i);Il.call(i),l(o)}function h(){function n(){var n=ao.touches(g);return p=k.k,n.forEach(function(n){n.identifier in d&&(d[n.identifier]=e(n))}),n}function t(){var t=ao.event.target;ao.select(t).on(x,r).on(b,a),_.push(t);for(var e=ao.event.changedTouches,i=0,u=e.length;u>i;++i)d[e[i].identifier]=null;var l=n(),c=Date.now();if(1===l.length){if(500>c-M){var f=l[0];o(g,f,d[f.identifier],Math.floor(Math.log(k.k)/Math.LN2)+1),S()}M=c}else if(l.length>1){var f=l[0],s=l[1],h=f[0]-s[0],p=f[1]-s[1];y=h*h+p*p}}function r(){var n,t,e,r,o=ao.touches(g);Il.call(g);for(var a=0,l=o.length;l>a;++a,r=null)if(e=o[a],r=d[e.identifier]){if(t)break;n=e,t=r}if(r){var f=(f=e[0]-n[0])*f+(f=e[1]-n[1])*f,s=y&&Math.sqrt(f/y);n=[(n[0]+e[0])/2,(n[1]+e[1])/2],t=[(t[0]+r[0])/2,(t[1]+r[1])/2],i(s*p)}M=null,u(n,t),c(v)}function a(){if(ao.event.touches.length){for(var t=ao.event.changedTouches,e=0,r=t.length;r>e;++e)delete d[t[e].identifier];for(var i in d)return void n()}ao.selectAll(_).on(m,null),w.on(L,s).on(R,h),N(),f(v)}var p,g=this,v=D.of(g,arguments),d={},y=0,m=".zoom-"+ao.event.changedTouches[0].identifier,x="touchmove"+m,b="touchend"+m,_=[],w=ao.select(g),N=W(g);t(),l(v),w.on(L,null).on(R,t)}function p(){var n=D.of(this,arguments);m?clearTimeout(m):(Il.call(this),v=e(d=y||ao.mouse(this)),l(n)),m=setTimeout(function(){m=null,f(n)},50),S(),i(Math.pow(2,.002*Bo())*k.k),u(d,v),c(n)}function g(){var n=ao.mouse(this),t=Math.log(k.k)/Math.LN2;o(this,n,e(n),ao.event.shiftKey?Math.ceil(t)-1:Math.floor(t)+1)}var v,d,y,m,M,x,b,_,w,k={x:0,y:0,k:1},E=[960,500],A=Jo,C=250,z=0,L="mousedown.zoom",q="mousemove.zoom",T="mouseup.zoom",R="touchstart.zoom",D=N(n,"zoomstart","zoom","zoomend");return Wo||(Wo="onwheel"in fo?(Bo=function(){return-ao.event.deltaY*(ao.event.deltaMode?120:1)},"wheel"):"onmousewheel"in fo?(Bo=function(){return 
ao.event.wheelDelta},"mousewheel"):(Bo=function(){return-ao.event.detail},"MozMousePixelScroll")),n.event=function(n){n.each(function(){var n=D.of(this,arguments),t=k;Hl?ao.select(this).transition().each("start.zoom",function(){k=this.__chart__||{x:0,y:0,k:1},l(n)}).tween("zoom:zoom",function(){var e=E[0],r=E[1],i=d?d[0]:e/2,u=d?d[1]:r/2,o=ao.interpolateZoom([(i-k.x)/k.k,(u-k.y)/k.k,e/k.k],[(i-t.x)/t.k,(u-t.y)/t.k,e/t.k]);return function(t){var r=o(t),a=e/r[2];this.__chart__=k={x:i-r[0]*a,y:u-r[1]*a,k:a},c(n)}}).each("interrupt.zoom",function(){f(n)}).each("end.zoom",function(){f(n)}):(this.__chart__=k,l(n),c(n),f(n))})},n.translate=function(t){return arguments.length?(k={x:+t[0],y:+t[1],k:k.k},a(),n):[k.x,k.y]},n.scale=function(t){return arguments.length?(k={x:k.x,y:k.y,k:null},i(+t),a(),n):k.k},n.scaleExtent=function(t){return arguments.length?(A=null==t?Jo:[+t[0],+t[1]],n):A},n.center=function(t){return arguments.length?(y=t&&[+t[0],+t[1]],n):y},n.size=function(t){return arguments.length?(E=t&&[+t[0],+t[1]],n):E},n.duration=function(t){return arguments.length?(C=+t,n):C},n.x=function(t){return arguments.length?(b=t,x=t.copy(),k={x:0,y:0,k:1},n):b},n.y=function(t){return arguments.length?(w=t,_=t.copy(),k={x:0,y:0,k:1},n):w},ao.rebind(n,D,"on")};var Bo,Wo,Jo=[0,1/0];ao.color=an,an.prototype.toString=function(){return this.rgb()+""},ao.hsl=ln;var Go=ln.prototype=new an;Go.brighter=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,this.l/n)},Go.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new ln(this.h,this.s,n*this.l)},Go.rgb=function(){return cn(this.h,this.s,this.l)},ao.hcl=fn;var Ko=fn.prototype=new an;Ko.brighter=function(n){return new fn(this.h,this.c,Math.min(100,this.l+Qo*(arguments.length?n:1)))},Ko.darker=function(n){return new fn(this.h,this.c,Math.max(0,this.l-Qo*(arguments.length?n:1)))},Ko.rgb=function(){return sn(this.h,this.c,this.l).rgb()},ao.lab=hn;var Qo=18,na=.95047,ta=1,ea=1.08883,ra=hn.prototype=new an;ra.brighter=function(n){return new hn(Math.min(100,this.l+Qo*(arguments.length?n:1)),this.a,this.b)},ra.darker=function(n){return new hn(Math.max(0,this.l-Qo*(arguments.length?n:1)),this.a,this.b)},ra.rgb=function(){return pn(this.l,this.a,this.b)},ao.rgb=mn;var ia=mn.prototype=new an;ia.brighter=function(n){n=Math.pow(.7,arguments.length?n:1);var t=this.r,e=this.g,r=this.b,i=30;return t||e||r?(t&&i>t&&(t=i),e&&i>e&&(e=i),r&&i>r&&(r=i),new mn(Math.min(255,t/n),Math.min(255,e/n),Math.min(255,r/n))):new mn(i,i,i)},ia.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new mn(n*this.r,n*this.g,n*this.b)},ia.hsl=function(){return wn(this.r,this.g,this.b)},ia.toString=function(){return"#"+bn(this.r)+bn(this.g)+bn(this.b)};var 
ua=ao.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});ua.forEach(function(n,t){ua.set(n,Mn(t))}),ao.functor=En,ao.xhr=An(m),ao.dsv=function(n,t){function e(n,e,u){arguments.length<3&&(u=e,e=null);var o=Cn(n,t,null==e?r:i(e),u);return o.row=function(n){return arguments.length?o.response(null==(e=n)?r:i(n)):e},o}function r(n){return e.parse(n.responseText)}function i(n){return function(t){return e.parse(t.responseText,n)}}function u(t){return t.map(o).join(n)}function o(n){return a.test(n)?'"'+n.replace(/\"/g,'""')+'"':n}var a=new RegExp('["'+n+"\n]"),l=n.charCodeAt(0);return e.parse=function(n,t){var r;return e.parseRows(n,function(n,e){if(r)return r(n,e-1);var i=new Function("d","return {"+n.map(function(n,t){return JSON.stringify(n)+": d["+t+"]"}).join(",")+"}");r=t?function(n,e){return t(i(n),e)}:i})},e.parseRows=function(n,t){function e(){if(f>=c)return o;if(i)return i=!1,u;var 
t=f;if(34===n.charCodeAt(t)){for(var e=t;e++<c;)if(34===n.charCodeAt(e)){if(34!==n.charCodeAt(e+1))break;++e}f=e+2;var r=n.charCodeAt(e+1);return 13===r?(i=!0,10===n.charCodeAt(e+2)&&++f):10===r&&(i=!0),n.slice(t+1,e).replace(/""/g,'"')}for(;c>f;){var r=n.charCodeAt(f++),a=1;if(10===r)i=!0;else if(13===r)i=!0,10===n.charCodeAt(f)&&(++f,++a);else if(r!==l)continue;return n.slice(t,f-a)}return n.slice(t)}for(var r,i,u={},o={},a=[],c=n.length,f=0,s=0;(r=e())!==o;){for(var h=[];r!==u&&r!==o;)h.push(r),r=e();t&&null==(h=t(h,s++))||a.push(h)}return a},e.format=function(t){if(Array.isArray(t[0]))return e.formatRows(t);var r=new y,i=[];return t.forEach(function(n){for(var t in n)r.has(t)||i.push(r.add(t))}),[i.map(o).join(n)].concat(t.map(function(t){return i.map(function(n){return o(t[n])}).join(n)})).join("\n")},e.formatRows=function(n){return n.map(u).join("\n")},e},ao.csv=ao.dsv(",","text/csv"),ao.tsv=ao.dsv("	","text/tab-separated-values");var oa,aa,la,ca,fa=this[x(this,"requestAnimationFrame")]||function(n){setTimeout(n,17)};ao.timer=function(){qn.apply(this,arguments)},ao.timer.flush=function(){Rn(),Dn()},ao.round=function(n,t){return t?Math.round(n*(t=Math.pow(10,t)))/t:Math.round(n)};var sa=["y","z","a","f","p","n","\xb5","m","","k","M","G","T","P","E","Z","Y"].map(Un);ao.formatPrefix=function(n,t){var e=0;return(n=+n)&&(0>n&&(n*=-1),t&&(n=ao.round(n,Pn(n,t))),e=1+Math.floor(1e-12+Math.log(n)/Math.LN10),e=Math.max(-24,Math.min(24,3*Math.floor((e-1)/3)))),sa[8+e/3]};var ha=/(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i,pa=ao.map({b:function(n){return n.toString(2)},c:function(n){return String.fromCharCode(n)},o:function(n){return n.toString(8)},x:function(n){return n.toString(16)},X:function(n){return n.toString(16).toUpperCase()},g:function(n,t){return n.toPrecision(t)},e:function(n,t){return n.toExponential(t)},f:function(n,t){return n.toFixed(t)},r:function(n,t){return(n=ao.round(n,Pn(n,t))).toFixed(Math.max(0,Math.min(20,Pn(n*(1+1e-15),t))))}}),ga=ao.time={},va=Date;Hn.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 0},valueOf:function(){return this._.valueOf()},setDate:function(){da.setUTCDate.apply(this._,arguments)},setDay:function(){da.setUTCDay.apply(this._,arguments)},setFullYear:function(){da.setUTCFullYear.apply(this._,arguments)},setHours:function(){da.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){da.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){da.setUTCMinutes.apply(this._,arguments)},setMonth:function(){da.setUTCMonth.apply(this._,arguments)},setSeconds:function(){da.setUTCSeconds.apply(this._,arguments)},setTime:function(){da.setTime.apply(this._,arguments)}};var da=Date.prototype;ga.year=On(function(n){return n=ga.day(n),n.setMonth(0,1),n},function(n,t){n.setFullYear(n.getFullYear()+t)},function(n){return n.getFullYear()}),ga.years=ga.year.range,ga.years.utc=ga.year.utc.range,ga.day=On(function(n){var t=new va(2e3,0);return t.setFullYear(n.getFullYear(),n.getMonth(),n.getDate()),t},function(n,t){n.setDate(n.getDate()+t)},function(n){return 
n.getDate()-1}),ga.days=ga.day.range,ga.days.utc=ga.day.utc.range,ga.dayOfYear=function(n){var t=ga.year(n);return Math.floor((n-t-6e4*(n.getTimezoneOffset()-t.getTimezoneOffset()))/864e5)},["sunday","monday","tuesday","wednesday","thursday","friday","saturday"].forEach(function(n,t){t=7-t;var e=ga[n]=On(function(n){return(n=ga.day(n)).setDate(n.getDate()-(n.getDay()+t)%7),n},function(n,t){n.setDate(n.getDate()+7*Math.floor(t))},function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)-(e!==t)});ga[n+"s"]=e.range,ga[n+"s"].utc=e.utc.range,ga[n+"OfYear"]=function(n){var e=ga.year(n).getDay();return Math.floor((ga.dayOfYear(n)+(e+t)%7)/7)}}),ga.week=ga.sunday,ga.weeks=ga.sunday.range,ga.weeks.utc=ga.sunday.utc.range,ga.weekOfYear=ga.sundayOfYear;var ya={"-":"",_:" ",0:"0"},ma=/^\s*\d+/,Ma=/^%/;ao.locale=function(n){return{numberFormat:jn(n),timeFormat:Yn(n)}};var xa=ao.locale({decimal:".",thousands:",",grouping:[3],currency:["$",""],dateTime:"%a %b %e %X %Y",date:"%m/%d/%Y",time:"%H:%M:%S",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],
+shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});ao.format=xa.numberFormat,ao.geo={},ft.prototype={s:0,t:0,add:function(n){st(n,this.t,ba),st(ba.s,this.s,this),this.s?this.t+=ba.t:this.s=ba.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var ba=new ft;ao.geo.stream=function(n,t){n&&_a.hasOwnProperty(n.type)?_a[n.type](n,t):ht(n,t)};var _a={Feature:function(n,t){ht(n.geometry,t)},FeatureCollection:function(n,t){for(var e=n.features,r=-1,i=e.length;++r<i;)ht(e[r].geometry,t)}},wa={Sphere:function(n,t){t.sphere()},Point:function(n,t){n=n.coordinates,t.point(n[0],n[1],n[2])},MultiPoint:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)n=e[r],t.point(n[0],n[1],n[2])},LineString:function(n,t){pt(n.coordinates,t,0)},MultiLineString:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)pt(e[r],t,0)},Polygon:function(n,t){gt(n.coordinates,t)},MultiPolygon:function(n,t){for(var e=n.coordinates,r=-1,i=e.length;++r<i;)gt(e[r],t)},GeometryCollection:function(n,t){for(var e=n.geometries,r=-1,i=e.length;++r<i;)ht(e[r],t)}};ao.geo.area=function(n){return Sa=0,ao.geo.stream(n,Na),Sa};var Sa,ka=new ft,Na={sphere:function(){Sa+=4*Fo},point:b,lineStart:b,lineEnd:b,polygonStart:function(){ka.reset(),Na.lineStart=vt},polygonEnd:function(){var n=2*ka;Sa+=0>n?4*Fo+n:n,Na.lineStart=Na.lineEnd=Na.point=b}};ao.geo.bounds=function(){function n(n,t){M.push(x=[f=n,h=n]),s>t&&(s=t),t>p&&(p=t)}function t(t,e){var r=dt([t*Yo,e*Yo]);if(y){var i=mt(y,r),u=[i[1],-i[0],0],o=mt(u,i);bt(o),o=_t(o);var l=t-g,c=l>0?1:-1,v=o[0]*Zo*c,d=xo(l)>180;if(d^(v>c*g&&c*t>v)){var m=o[1]*Zo;m>p&&(p=m)}else if(v=(v+360)%360-180,d^(v>c*g&&c*t>v)){var m=-o[1]*Zo;s>m&&(s=m)}else s>e&&(s=e),e>p&&(p=e);d?g>t?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t):h>=f?(f>t&&(f=t),t>h&&(h=t)):t>g?a(f,t)>a(f,h)&&(h=t):a(t,h)>a(f,h)&&(f=t)}else n(t,e);y=r,g=t}function e(){b.point=t}function r(){x[0]=f,x[1]=h,b.point=n,y=null}function i(n,e){if(y){var r=n-g;m+=xo(r)>180?r+(r>0?360:-360):r}else v=n,d=e;Na.point(n,e),t(n,e)}function u(){Na.lineStart()}function o(){i(v,d),Na.lineEnd(),xo(m)>Uo&&(f=-(h=180)),x[0]=f,x[1]=h,y=null}function a(n,t){return(t-=n)<0?t+360:t}function l(n,t){return n[0]-t[0]}function c(n,t){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:n<t[0]||t[1]<n}var f,s,h,p,g,v,d,y,m,M,x,b={point:n,lineStart:e,lineEnd:r,polygonStart:function(){b.point=i,b.lineStart=u,b.lineEnd=o,m=0,Na.polygonStart()},polygonEnd:function(){Na.polygonEnd(),b.point=n,b.lineStart=e,b.lineEnd=r,0>ka?(f=-(h=180),s=-(p=90)):m>Uo?p=90:-Uo>m&&(s=-90),x[0]=f,x[1]=h}};return function(n){p=h=-(f=s=1/0),M=[],ao.geo.stream(n,b);var t=M.length;if(t){M.sort(l);for(var e,r=1,i=M[0],u=[i];t>r;++r)e=M[r],c(e[0],i)||c(e[1],i)?(a(i[0],e[1])>a(i[0],i[1])&&(i[1]=e[1]),a(e[0],i[1])>a(i[0],i[1])&&(i[0]=e[0])):u.push(i=e);for(var o,e,g=-(1/0),t=u.length-1,r=0,i=u[t];t>=r;i=e,++r)e=u[r],(o=a(i[1],e[0]))>g&&(g=o,f=e[0],h=i[1])}return M=x=null,f===1/0||s===1/0?[[NaN,NaN],[NaN,NaN]]:[[f,s],[h,p]]}}(),ao.geo.centroid=function(n){Ea=Aa=Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,ja);var t=Da,e=Pa,r=Ua,i=t*t+e*e+r*r;return jo>i&&(t=qa,e=Ta,r=Ra,Uo>Aa&&(t=Ca,e=za,r=La),i=t*t+e*e+r*r,jo>i)?[NaN,NaN]:[Math.atan2(e,t)*Zo,tn(r/Math.sqrt(i))*Zo]};var 
Ea,Aa,Ca,za,La,qa,Ta,Ra,Da,Pa,Ua,ja={sphere:b,point:St,lineStart:Nt,lineEnd:Et,polygonStart:function(){ja.lineStart=At},polygonEnd:function(){ja.lineStart=Nt}},Fa=Rt(zt,jt,Ht,[-Fo,-Fo/2]),Ha=1e9;ao.geo.clipExtent=function(){var n,t,e,r,i,u,o={stream:function(n){return i&&(i.valid=!1),i=u(n),i.valid=!0,i},extent:function(a){return arguments.length?(u=Zt(n=+a[0][0],t=+a[0][1],e=+a[1][0],r=+a[1][1]),i&&(i.valid=!1,i=null),o):[[n,t],[e,r]]}};return o.extent([[0,0],[960,500]])},(ao.geo.conicEqualArea=function(){return Vt(Xt)}).raw=Xt,ao.geo.albers=function(){return ao.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},ao.geo.albersUsa=function(){function n(n){var u=n[0],o=n[1];return t=null,e(u,o),t||(r(u,o),t)||i(u,o),t}var t,e,r,i,u=ao.geo.albers(),o=ao.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a=ao.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),l={point:function(n,e){t=[n,e]}};return n.invert=function(n){var t=u.scale(),e=u.translate(),r=(n[0]-e[0])/t,i=(n[1]-e[1])/t;return(i>=.12&&.234>i&&r>=-.425&&-.214>r?o:i>=.166&&.234>i&&r>=-.214&&-.115>r?a:u).invert(n)},n.stream=function(n){var t=u.stream(n),e=o.stream(n),r=a.stream(n);return{point:function(n,i){t.point(n,i),e.point(n,i),r.point(n,i)},sphere:function(){t.sphere(),e.sphere(),r.sphere()},lineStart:function(){t.lineStart(),e.lineStart(),r.lineStart()},lineEnd:function(){t.lineEnd(),e.lineEnd(),r.lineEnd()},polygonStart:function(){t.polygonStart(),e.polygonStart(),r.polygonStart()},polygonEnd:function(){t.polygonEnd(),e.polygonEnd(),r.polygonEnd()}}},n.precision=function(t){return arguments.length?(u.precision(t),o.precision(t),a.precision(t),n):u.precision()},n.scale=function(t){return arguments.length?(u.scale(t),o.scale(.35*t),a.scale(t),n.translate(u.translate())):u.scale()},n.translate=function(t){if(!arguments.length)return u.translate();var c=u.scale(),f=+t[0],s=+t[1];return e=u.translate(t).clipExtent([[f-.455*c,s-.238*c],[f+.455*c,s+.238*c]]).stream(l).point,r=o.translate([f-.307*c,s+.201*c]).clipExtent([[f-.425*c+Uo,s+.12*c+Uo],[f-.214*c-Uo,s+.234*c-Uo]]).stream(l).point,i=a.translate([f-.205*c,s+.212*c]).clipExtent([[f-.214*c+Uo,s+.166*c+Uo],[f-.115*c-Uo,s+.234*c-Uo]]).stream(l).point,n},n.scale(1070)};var Oa,Ia,Ya,Za,Va,Xa,$a={point:b,lineStart:b,lineEnd:b,polygonStart:function(){Ia=0,$a.lineStart=$t},polygonEnd:function(){$a.lineStart=$a.lineEnd=$a.point=b,Oa+=xo(Ia/2)}},Ba={point:Bt,lineStart:b,lineEnd:b,polygonStart:b,polygonEnd:b},Wa={point:Gt,lineStart:Kt,lineEnd:Qt,polygonStart:function(){Wa.lineStart=ne},polygonEnd:function(){Wa.point=Gt,Wa.lineStart=Kt,Wa.lineEnd=Qt}};ao.geo.path=function(){function n(n){return n&&("function"==typeof a&&u.pointRadius(+a.apply(this,arguments)),o&&o.valid||(o=i(u)),ao.geo.stream(n,o)),u.result()}function t(){return o=null,n}var e,r,i,u,o,a=4.5;return n.area=function(n){return Oa=0,ao.geo.stream(n,i($a)),Oa},n.centroid=function(n){return Ca=za=La=qa=Ta=Ra=Da=Pa=Ua=0,ao.geo.stream(n,i(Wa)),Ua?[Da/Ua,Pa/Ua]:Ra?[qa/Ra,Ta/Ra]:La?[Ca/La,za/La]:[NaN,NaN]},n.bounds=function(n){return Va=Xa=-(Ya=Za=1/0),ao.geo.stream(n,i(Ba)),[[Ya,Za],[Va,Xa]]},n.projection=function(n){return arguments.length?(i=(e=n)?n.stream||re(n):m,t()):e},n.context=function(n){return arguments.length?(u=null==(r=n)?new Wt:new te(n),"function"!=typeof a&&u.pointRadius(a),t()):r},n.pointRadius=function(t){return arguments.length?(a="function"==typeof 
t?t:(u.pointRadius(+t),+t),n):a},n.projection(ao.geo.albersUsa()).context(null)},ao.geo.transform=function(n){return{stream:function(t){var e=new ie(t);for(var r in n)e[r]=n[r];return e}}},ie.prototype={point:function(n,t){this.stream.point(n,t)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},ao.geo.projection=oe,ao.geo.projectionMutator=ae,(ao.geo.equirectangular=function(){return oe(ce)}).raw=ce.invert=ce,ao.geo.rotation=function(n){function t(t){return t=n(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t}return n=se(n[0]%360*Yo,n[1]*Yo,n.length>2?n[2]*Yo:0),t.invert=function(t){return t=n.invert(t[0]*Yo,t[1]*Yo),t[0]*=Zo,t[1]*=Zo,t},t},fe.invert=ce,ao.geo.circle=function(){function n(){var n="function"==typeof r?r.apply(this,arguments):r,t=se(-n[0]*Yo,-n[1]*Yo,0).invert,i=[];return e(null,null,1,{point:function(n,e){i.push(n=t(n,e)),n[0]*=Zo,n[1]*=Zo}}),{type:"Polygon",coordinates:[i]}}var t,e,r=[0,0],i=6;return n.origin=function(t){return arguments.length?(r=t,n):r},n.angle=function(r){return arguments.length?(e=ve((t=+r)*Yo,i*Yo),n):t},n.precision=function(r){return arguments.length?(e=ve(t*Yo,(i=+r)*Yo),n):i},n.angle(90)},ao.geo.distance=function(n,t){var e,r=(t[0]-n[0])*Yo,i=n[1]*Yo,u=t[1]*Yo,o=Math.sin(r),a=Math.cos(r),l=Math.sin(i),c=Math.cos(i),f=Math.sin(u),s=Math.cos(u);return Math.atan2(Math.sqrt((e=s*o)*e+(e=c*f-l*s*a)*e),l*f+c*s*a)},ao.geo.graticule=function(){function n(){return{type:"MultiLineString",coordinates:t()}}function t(){return ao.range(Math.ceil(u/d)*d,i,d).map(h).concat(ao.range(Math.ceil(c/y)*y,l,y).map(p)).concat(ao.range(Math.ceil(r/g)*g,e,g).filter(function(n){return xo(n%d)>Uo}).map(f)).concat(ao.range(Math.ceil(a/v)*v,o,v).filter(function(n){return xo(n%y)>Uo}).map(s))}var e,r,i,u,o,a,l,c,f,s,h,p,g=10,v=g,d=90,y=360,m=2.5;return n.lines=function(){return t().map(function(n){return{type:"LineString",coordinates:n}})},n.outline=function(){return{type:"Polygon",coordinates:[h(u).concat(p(l).slice(1),h(i).reverse().slice(1),p(c).reverse().slice(1))]}},n.extent=function(t){return arguments.length?n.majorExtent(t).minorExtent(t):n.minorExtent()},n.majorExtent=function(t){return arguments.length?(u=+t[0][0],i=+t[1][0],c=+t[0][1],l=+t[1][1],u>i&&(t=u,u=i,i=t),c>l&&(t=c,c=l,l=t),n.precision(m)):[[u,c],[i,l]]},n.minorExtent=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],a=+t[0][1],o=+t[1][1],r>e&&(t=r,r=e,e=t),a>o&&(t=a,a=o,o=t),n.precision(m)):[[r,a],[e,o]]},n.step=function(t){return arguments.length?n.majorStep(t).minorStep(t):n.minorStep()},n.majorStep=function(t){return arguments.length?(d=+t[0],y=+t[1],n):[d,y]},n.minorStep=function(t){return arguments.length?(g=+t[0],v=+t[1],n):[g,v]},n.precision=function(t){return arguments.length?(m=+t,f=ye(a,o,90),s=me(r,e,m),h=ye(c,l,90),p=me(u,i,m),n):m},n.majorExtent([[-180,-90+Uo],[180,90-Uo]]).minorExtent([[-180,-80-Uo],[180,80+Uo]])},ao.geo.greatArc=function(){function n(){return{type:"LineString",coordinates:[t||r.apply(this,arguments),e||i.apply(this,arguments)]}}var t,e,r=Me,i=xe;return n.distance=function(){return ao.geo.distance(t||r.apply(this,arguments),e||i.apply(this,arguments))},n.source=function(e){return arguments.length?(r=e,t="function"==typeof e?null:e,n):r},n.target=function(t){return arguments.length?(i=t,e="function"==typeof t?null:t,n):i},n.precision=function(){return 
arguments.length?n:0},n},ao.geo.interpolate=function(n,t){return be(n[0]*Yo,n[1]*Yo,t[0]*Yo,t[1]*Yo)},ao.geo.length=function(n){return Ja=0,ao.geo.stream(n,Ga),Ja};var Ja,Ga={sphere:b,point:b,lineStart:_e,lineEnd:b,polygonStart:b,polygonEnd:b},Ka=we(function(n){return Math.sqrt(2/(1+n))},function(n){return 2*Math.asin(n/2)});(ao.geo.azimuthalEqualArea=function(){return oe(Ka)}).raw=Ka;var Qa=we(function(n){var t=Math.acos(n);return t&&t/Math.sin(t)},m);(ao.geo.azimuthalEquidistant=function(){return oe(Qa)}).raw=Qa,(ao.geo.conicConformal=function(){return Vt(Se)}).raw=Se,(ao.geo.conicEquidistant=function(){return Vt(ke)}).raw=ke;var nl=we(function(n){return 1/n},Math.atan);(ao.geo.gnomonic=function(){return oe(nl)}).raw=nl,Ne.invert=function(n,t){return[n,2*Math.atan(Math.exp(t))-Io]},(ao.geo.mercator=function(){return Ee(Ne)}).raw=Ne;var tl=we(function(){return 1},Math.asin);(ao.geo.orthographic=function(){return oe(tl)}).raw=tl;var el=we(function(n){return 1/(1+n)},function(n){return 2*Math.atan(n)});(ao.geo.stereographic=function(){return oe(el)}).raw=el,Ae.invert=function(n,t){return[-t,2*Math.atan(Math.exp(n))-Io]},(ao.geo.transverseMercator=function(){var n=Ee(Ae),t=n.center,e=n.rotate;return n.center=function(n){return n?t([-n[1],n[0]]):(n=t(),[n[1],-n[0]])},n.rotate=function(n){return n?e([n[0],n[1],n.length>2?n[2]+90:90]):(n=e(),[n[0],n[1],n[2]-90])},e([0,0,90])}).raw=Ae,ao.geom={},ao.geom.hull=function(n){function t(n){if(n.length<3)return[];var t,i=En(e),u=En(r),o=n.length,a=[],l=[];for(t=0;o>t;t++)a.push([+i.call(this,n[t],t),+u.call(this,n[t],t),t]);for(a.sort(qe),t=0;o>t;t++)l.push([a[t][0],-a[t][1]]);var c=Le(a),f=Le(l),s=f[0]===c[0],h=f[f.length-1]===c[c.length-1],p=[];for(t=c.length-1;t>=0;--t)p.push(n[a[c[t]][2]]);for(t=+s;t<f.length-h;++t)p.push(n[a[f[t]][2]]);return p}var e=Ce,r=ze;return arguments.length?t(n):(t.x=function(n){return arguments.length?(e=n,t):e},t.y=function(n){return arguments.length?(r=n,t):r},t)},ao.geom.polygon=function(n){return ko(n,rl),n};var rl=ao.geom.polygon.prototype=[];rl.area=function(){for(var n,t=-1,e=this.length,r=this[e-1],i=0;++t<e;)n=r,r=this[t],i+=n[1]*r[0]-n[0]*r[1];return.5*i},rl.centroid=function(n){var t,e,r=-1,i=this.length,u=0,o=0,a=this[i-1];for(arguments.length||(n=-1/(6*this.area()));++r<i;)t=a,a=this[r],e=t[0]*a[1]-a[0]*t[1],u+=(t[0]+a[0])*e,o+=(t[1]+a[1])*e;return[u*n,o*n]},rl.clip=function(n){for(var t,e,r,i,u,o,a=De(n),l=-1,c=this.length-De(this),f=this[c-1];++l<c;){for(t=n.slice(),n.length=0,i=this[l],u=t[(r=t.length-a)-1],e=-1;++e<r;)o=t[e],Te(o,f,i)?(Te(u,f,i)||n.push(Re(u,o,f,i)),n.push(o)):Te(u,f,i)&&n.push(Re(u,o,f,i)),u=o;a&&n.push(n[0]),f=i}return n};var il,ul,ol,al,ll,cl=[],fl=[];Ye.prototype.prepare=function(){for(var n,t=this.edges,e=t.length;e--;)n=t[e].edge,n.b&&n.a||t.splice(e,1);return t.sort(Ve),t.length},tr.prototype={start:function(){return this.edge.l===this.site?this.edge.a:this.edge.b},end:function(){return this.edge.l===this.site?this.edge.b:this.edge.a}},er.prototype={insert:function(n,t){var e,r,i;if(n){if(t.P=n,t.N=n.N,n.N&&(n.N.P=t),n.N=t,n.R){for(n=n.R;n.L;)n=n.L;n.L=t}else n.R=t;e=n}else 
this._?(n=or(this._),t.P=null,t.N=n,n.P=n.L=t,e=n):(t.P=t.N=null,this._=t,e=null);for(t.L=t.R=null,t.U=e,t.C=!0,n=t;e&&e.C;)r=e.U,e===r.L?(i=r.R,i&&i.C?(e.C=i.C=!1,r.C=!0,n=r):(n===e.R&&(ir(this,e),n=e,e=n.U),e.C=!1,r.C=!0,ur(this,r))):(i=r.L,i&&i.C?(e.C=i.C=!1,r.C=!0,n=r):(n===e.L&&(ur(this,e),n=e,e=n.U),e.C=!1,r.C=!0,ir(this,r))),e=n.U;this._.C=!1},remove:function(n){n.N&&(n.N.P=n.P),n.P&&(n.P.N=n.N),n.N=n.P=null;var t,e,r,i=n.U,u=n.L,o=n.R;if(e=u?o?or(o):u:o,i?i.L===n?i.L=e:i.R=e:this._=e,u&&o?(r=e.C,e.C=n.C,e.L=u,u.U=e,e!==o?(i=e.U,e.U=n.U,n=e.R,i.L=n,e.R=o,o.U=e):(e.U=i,i=e,n=e.R)):(r=n.C,n=e),n&&(n.U=i),!r){if(n&&n.C)return void(n.C=!1);do{if(n===this._)break;if(n===i.L){if(t=i.R,t.C&&(t.C=!1,i.C=!0,ir(this,i),t=i.R),t.L&&t.L.C||t.R&&t.R.C){t.R&&t.R.C||(t.L.C=!1,t.C=!0,ur(this,t),t=i.R),t.C=i.C,i.C=t.R.C=!1,ir(this,i),n=this._;break}}else if(t=i.L,t.C&&(t.C=!1,i.C=!0,ur(this,i),t=i.L),t.L&&t.L.C||t.R&&t.R.C){t.L&&t.L.C||(t.R.C=!1,t.C=!0,ir(this,t),t=i.L),t.C=i.C,i.C=t.L.C=!1,ur(this,i),n=this._;break}t.C=!0,n=i,i=i.U}while(!n.C);n&&(n.C=!1)}}},ao.geom.voronoi=function(n){function t(n){var t=new Array(n.length),r=a[0][0],i=a[0][1],u=a[1][0],o=a[1][1];return ar(e(n),a).cells.forEach(function(e,a){var l=e.edges,c=e.site,f=t[a]=l.length?l.map(function(n){var t=n.start();return[t.x,t.y]}):c.x>=r&&c.x<=u&&c.y>=i&&c.y<=o?[[r,o],[u,o],[u,i],[r,i]]:[];f.point=n[a]}),t}function e(n){return n.map(function(n,t){return{x:Math.round(u(n,t)/Uo)*Uo,y:Math.round(o(n,t)/Uo)*Uo,i:t}})}var r=Ce,i=ze,u=r,o=i,a=sl;return n?t(n):(t.links=function(n){return ar(e(n)).edges.filter(function(n){return n.l&&n.r}).map(function(t){return{source:n[t.l.i],target:n[t.r.i]}})},t.triangles=function(n){var t=[];return ar(e(n)).cells.forEach(function(e,r){for(var i,u,o=e.site,a=e.edges.sort(Ve),l=-1,c=a.length,f=a[c-1].edge,s=f.l===o?f.r:f.l;++l<c;)i=f,u=s,f=a[l].edge,s=f.l===o?f.r:f.l,r<u.i&&r<s.i&&cr(o,u,s)<0&&t.push([n[r],n[u.i],n[s.i]])}),t},t.x=function(n){return arguments.length?(u=En(r=n),t):r},t.y=function(n){return arguments.length?(o=En(i=n),t):i},t.clipExtent=function(n){return arguments.length?(a=null==n?sl:n,t):a===sl?null:a},t.size=function(n){return arguments.length?t.clipExtent(n&&[[0,0],n]):a===sl?null:a&&a[1]},t)};var sl=[[-1e6,-1e6],[1e6,1e6]];ao.geom.delaunay=function(n){return ao.geom.voronoi().triangles(n)},ao.geom.quadtree=function(n,t,e,r,i){function u(n){function u(n,t,e,r,i,u,o,a){if(!isNaN(e)&&!isNaN(r))if(n.leaf){var l=n.x,f=n.y;if(null!=l)if(xo(l-e)+xo(f-r)<.01)c(n,t,e,r,i,u,o,a);else{var s=n.point;n.x=n.y=n.point=null,c(n,s,l,f,i,u,o,a),c(n,t,e,r,i,u,o,a)}else n.x=e,n.y=r,n.point=t}else c(n,t,e,r,i,u,o,a)}function c(n,t,e,r,i,o,a,l){var c=.5*(i+a),f=.5*(o+l),s=e>=c,h=r>=f,p=h<<1|s;n.leaf=!1,n=n.nodes[p]||(n.nodes[p]=hr()),s?i=c:a=c,h?o=f:l=f,u(n,t,e,r,i,o,a,l)}var f,s,h,p,g,v,d,y,m,M=En(a),x=En(l);if(null!=t)v=t,d=e,y=r,m=i;else if(y=m=-(v=d=1/0),s=[],h=[],g=n.length,o)for(p=0;g>p;++p)f=n[p],f.x<v&&(v=f.x),f.y<d&&(d=f.y),f.x>y&&(y=f.x),f.y>m&&(m=f.y),s.push(f.x),h.push(f.y);else for(p=0;g>p;++p){var b=+M(f=n[p],p),_=+x(f,p);v>b&&(v=b),d>_&&(d=_),b>y&&(y=b),_>m&&(m=_),s.push(b),h.push(_)}var w=y-v,S=m-d;w>S?m=d+w:y=v+S;var k=hr();if(k.add=function(n){u(k,n,+M(n,++p),+x(n,p),v,d,y,m)},k.visit=function(n){pr(n,k,v,d,y,m)},k.find=function(n){return gr(k,n[0],n[1],v,d,y,m)},p=-1,null==t){for(;++p<g;)u(k,n[p],s[p],h[p],v,d,y,m);--p}else n.forEach(k.add);return s=h=n=f=null,k}var o,a=Ce,l=ze;return(o=arguments.length)?(a=fr,l=sr,3===o&&(i=e,r=t,e=t=0),u(n)):(u.x=function(n){return 
arguments.length?(a=n,u):a},u.y=function(n){return arguments.length?(l=n,u):l},u.extent=function(n){return arguments.length?(null==n?t=e=r=i=null:(t=+n[0][0],e=+n[0][1],r=+n[1][0],i=+n[1][1]),u):null==t?null:[[t,e],[r,i]]},u.size=function(n){return arguments.length?(null==n?t=e=r=i=null:(t=e=0,r=+n[0],i=+n[1]),u):null==t?null:[r-t,i-e]},u)},ao.interpolateRgb=vr,ao.interpolateObject=dr,ao.interpolateNumber=yr,ao.interpolateString=mr;var hl=/[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g,pl=new RegExp(hl.source,"g");ao.interpolate=Mr,ao.interpolators=[function(n,t){var e=typeof t;return("string"===e?ua.has(t.toLowerCase())||/^(#|rgb\(|hsl\()/i.test(t)?vr:mr:t instanceof an?vr:Array.isArray(t)?xr:"object"===e&&isNaN(t)?dr:yr)(n,t)}],ao.interpolateArray=xr;var gl=function(){return m},vl=ao.map({linear:gl,poly:Er,quad:function(){return Sr},cubic:function(){return kr},sin:function(){return Ar},exp:function(){return Cr},circle:function(){return zr},elastic:Lr,back:qr,bounce:function(){return Tr}}),dl=ao.map({"in":m,out:_r,"in-out":wr,"out-in":function(n){return wr(_r(n))}});ao.ease=function(n){var t=n.indexOf("-"),e=t>=0?n.slice(0,t):n,r=t>=0?n.slice(t+1):"in";return e=vl.get(e)||gl,r=dl.get(r)||m,br(r(e.apply(null,lo.call(arguments,1))))},ao.interpolateHcl=Rr,ao.interpolateHsl=Dr,ao.interpolateLab=Pr,ao.interpolateRound=Ur,ao.transform=function(n){var t=fo.createElementNS(ao.ns.prefix.svg,"g");return(ao.transform=function(n){if(null!=n){t.setAttribute("transform",n);var e=t.transform.baseVal.consolidate()}return new jr(e?e.matrix:yl)})(n)},jr.prototype.toString=function(){return"translate("+this.translate+")rotate("+this.rotate+")skewX("+this.skew+")scale("+this.scale+")"};var yl={a:1,b:0,c:0,d:1,e:0,f:0};ao.interpolateTransform=$r,ao.layout={},ao.layout.bundle=function(){return function(n){for(var t=[],e=-1,r=n.length;++e<r;)t.push(Jr(n[e]));return t}},ao.layout.chord=function(){function n(){var n,c,s,h,p,g={},v=[],d=ao.range(u),y=[];for(e=[],r=[],n=0,h=-1;++h<u;){for(c=0,p=-1;++p<u;)c+=i[h][p];v.push(c),y.push(ao.range(u)),n+=c}for(o&&d.sort(function(n,t){return o(v[n],v[t])}),a&&y.forEach(function(n,t){n.sort(function(n,e){return a(i[t][n],i[t][e])})}),n=(Ho-f*u)/n,c=0,h=-1;++h<u;){for(s=c,p=-1;++p<u;){var m=d[h],M=y[m][p],x=i[m][M],b=c,_=c+=x*n;g[m+"-"+M]={index:m,subindex:M,startAngle:b,endAngle:_,value:x}}r[m]={index:m,startAngle:s,endAngle:c,value:v[m]},c+=f}for(h=-1;++h<u;)for(p=h-1;++p<u;){var w=g[h+"-"+p],S=g[p+"-"+h];(w.value||S.value)&&e.push(w.value<S.value?{source:S,target:w}:{source:w,target:S})}l&&t()}function t(){e.sort(function(n,t){return l((n.source.value+n.target.value)/2,(t.source.value+t.target.value)/2)})}var e,r,i,u,o,a,l,c={},f=0;return c.matrix=function(n){return arguments.length?(u=(i=n)&&i.length,e=r=null,c):i},c.padding=function(n){return arguments.length?(f=n,e=r=null,c):f},c.sortGroups=function(n){return arguments.length?(o=n,e=r=null,c):o},c.sortSubgroups=function(n){return arguments.length?(a=n,e=null,c):a},c.sortChords=function(n){return arguments.length?(l=n,e&&t(),c):l},c.chords=function(){return e||n(),e},c.groups=function(){return r||n(),r},c},ao.layout.force=function(){function n(n){return function(t,e,r,i){if(t.point!==n){var u=t.cx-n.x,o=t.cy-n.y,a=i-e,l=u*u+o*o;if(l>a*a/y){if(v>l){var c=t.charge/l;n.px-=u*c,n.py-=o*c}return!0}if(t.point&&l&&v>l){var c=t.pointCharge/l;n.px-=u*c,n.py-=o*c}}return!t.charge}}function t(n){n.px=ao.event.x,n.py=ao.event.y,l.resume()}var 
e,r,i,u,o,a,l={},c=ao.dispatch("start","tick","end"),f=[1,1],s=.9,h=ml,p=Ml,g=-30,v=xl,d=.1,y=.64,M=[],x=[];return l.tick=function(){if((i*=.99)<.005)return e=null,c.end({type:"end",alpha:i=0}),!0;var t,r,l,h,p,v,y,m,b,_=M.length,w=x.length;for(r=0;w>r;++r)l=x[r],h=l.source,p=l.target,m=p.x-h.x,b=p.y-h.y,(v=m*m+b*b)&&(v=i*o[r]*((v=Math.sqrt(v))-u[r])/v,m*=v,b*=v,p.x-=m*(y=h.weight+p.weight?h.weight/(h.weight+p.weight):.5),p.y-=b*y,h.x+=m*(y=1-y),h.y+=b*y);if((y=i*d)&&(m=f[0]/2,b=f[1]/2,r=-1,y))for(;++r<_;)l=M[r],l.x+=(m-l.x)*y,l.y+=(b-l.y)*y;if(g)for(ri(t=ao.geom.quadtree(M),i,a),r=-1;++r<_;)(l=M[r]).fixed||t.visit(n(l));for(r=-1;++r<_;)l=M[r],l.fixed?(l.x=l.px,l.y=l.py):(l.x-=(l.px-(l.px=l.x))*s,l.y-=(l.py-(l.py=l.y))*s);c.tick({type:"tick",alpha:i})},l.nodes=function(n){return arguments.length?(M=n,l):M},l.links=function(n){return arguments.length?(x=n,l):x},l.size=function(n){return arguments.length?(f=n,l):f},l.linkDistance=function(n){return arguments.length?(h="function"==typeof n?n:+n,l):h},l.distance=l.linkDistance,l.linkStrength=function(n){return arguments.length?(p="function"==typeof n?n:+n,l):p},l.friction=function(n){return arguments.length?(s=+n,l):s},l.charge=function(n){return arguments.length?(g="function"==typeof n?n:+n,l):g},l.chargeDistance=function(n){return arguments.length?(v=n*n,l):Math.sqrt(v)},l.gravity=function(n){return arguments.length?(d=+n,l):d},l.theta=function(n){return arguments.length?(y=n*n,l):Math.sqrt(y)},l.alpha=function(n){return arguments.length?(n=+n,i?n>0?i=n:(e.c=null,e.t=NaN,e=null,c.end({type:"end",alpha:i=0})):n>0&&(c.start({type:"start",alpha:i=n}),e=qn(l.tick)),l):i},l.start=function(){function n(n,r){if(!e){for(e=new Array(i),l=0;i>l;++l)e[l]=[];for(l=0;c>l;++l){var u=x[l];e[u.source.index].push(u.target),e[u.target.index].push(u.source)}}for(var o,a=e[t],l=-1,f=a.length;++l<f;)if(!isNaN(o=a[l][n]))return o;return Math.random()*r}var t,e,r,i=M.length,c=x.length,s=f[0],v=f[1];for(t=0;i>t;++t)(r=M[t]).index=t,r.weight=0;for(t=0;c>t;++t)r=x[t],"number"==typeof r.source&&(r.source=M[r.source]),"number"==typeof r.target&&(r.target=M[r.target]),++r.source.weight,++r.target.weight;for(t=0;i>t;++t)r=M[t],isNaN(r.x)&&(r.x=n("x",s)),isNaN(r.y)&&(r.y=n("y",v)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(u=[],"function"==typeof h)for(t=0;c>t;++t)u[t]=+h.call(this,x[t],t);else for(t=0;c>t;++t)u[t]=h;if(o=[],"function"==typeof p)for(t=0;c>t;++t)o[t]=+p.call(this,x[t],t);else for(t=0;c>t;++t)o[t]=p;if(a=[],"function"==typeof g)for(t=0;i>t;++t)a[t]=+g.call(this,M[t],t);else for(t=0;i>t;++t)a[t]=g;return l.resume()},l.resume=function(){return l.alpha(.1)},l.stop=function(){return l.alpha(0)},l.drag=function(){return r||(r=ao.behavior.drag().origin(m).on("dragstart.force",Qr).on("drag.force",t).on("dragend.force",ni)),arguments.length?void this.on("mouseover.force",ti).on("mouseout.force",ei).call(r):r},ao.rebind(l,c,"on")};var ml=20,Ml=1,xl=1/0;ao.layout.hierarchy=function(){function n(i){var u,o=[i],a=[];for(i.depth=0;null!=(u=o.pop());)if(a.push(u),(c=e.call(n,u,u.depth))&&(l=c.length)){for(var l,c,f;--l>=0;)o.push(f=c[l]),f.parent=u,f.depth=u.depth+1;r&&(u.value=0),u.children=c}else r&&(u.value=+r.call(n,u,u.depth)||0),delete u.children;return oi(i,function(n){var e,i;t&&(e=n.children)&&e.sort(t),r&&(i=n.parent)&&(i.value+=n.value)}),a}var t=ci,e=ai,r=li;return n.sort=function(e){return arguments.length?(t=e,n):t},n.children=function(t){return arguments.length?(e=t,n):e},n.value=function(t){return 
arguments.length?(r=t,n):r},n.revalue=function(t){return r&&(ui(t,function(n){n.children&&(n.value=0)}),oi(t,function(t){var e;t.children||(t.value=+r.call(n,t,t.depth)||0),(e=t.parent)&&(e.value+=t.value)})),t},n},ao.layout.partition=function(){function n(t,e,r,i){var u=t.children;if(t.x=e,t.y=t.depth*i,t.dx=r,t.dy=i,u&&(o=u.length)){var o,a,l,c=-1;for(r=t.value?r/t.value:0;++c<o;)n(a=u[c],e,l=a.value*r,i),e+=l}}function t(n){var e=n.children,r=0;if(e&&(i=e.length))for(var i,u=-1;++u<i;)r=Math.max(r,t(e[u]));return 1+r}function e(e,u){var o=r.call(this,e,u);return n(o[0],0,i[0],i[1]/t(o[0])),o}var r=ao.layout.hierarchy(),i=[1,1];return e.size=function(n){return arguments.length?(i=n,e):i},ii(e,r)},ao.layout.pie=function(){function n(o){var a,l=o.length,c=o.map(function(e,r){return+t.call(n,e,r)}),f=+("function"==typeof r?r.apply(this,arguments):r),s=("function"==typeof i?i.apply(this,arguments):i)-f,h=Math.min(Math.abs(s)/l,+("function"==typeof u?u.apply(this,arguments):u)),p=h*(0>s?-1:1),g=ao.sum(c),v=g?(s-l*p)/g:0,d=ao.range(l),y=[];return null!=e&&d.sort(e===bl?function(n,t){return c[t]-c[n]}:function(n,t){return e(o[n],o[t])}),d.forEach(function(n){y[n]={data:o[n],value:a=c[n],startAngle:f,endAngle:f+=a*v+p,padAngle:h}}),y}var t=Number,e=bl,r=0,i=Ho,u=0;return n.value=function(e){return arguments.length?(t=e,n):t},n.sort=function(t){return arguments.length?(e=t,n):e},n.startAngle=function(t){return arguments.length?(r=t,n):r},n.endAngle=function(t){return arguments.length?(i=t,n):i},n.padAngle=function(t){return arguments.length?(u=t,n):u},n};var bl={};ao.layout.stack=function(){function n(a,l){if(!(h=a.length))return a;var c=a.map(function(e,r){return t.call(n,e,r)}),f=c.map(function(t){return t.map(function(t,e){return[u.call(n,t,e),o.call(n,t,e)]})}),s=e.call(n,f,l);c=ao.permute(c,s),f=ao.permute(f,s);var h,p,g,v,d=r.call(n,f,l),y=c[0].length;for(g=0;y>g;++g)for(i.call(n,c[0][g],v=d[g],f[0][g][1]),p=1;h>p;++p)i.call(n,c[p][g],v+=f[p-1][g][1],f[p][g][1]);return a}var t=m,e=gi,r=vi,i=pi,u=si,o=hi;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e="function"==typeof t?t:_l.get(t)||gi,n):e},n.offset=function(t){return arguments.length?(r="function"==typeof t?t:wl.get(t)||vi,n):r},n.x=function(t){return arguments.length?(u=t,n):u},n.y=function(t){return arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(i=t,n):i},n};var _l=ao.map({"inside-out":function(n){var t,e,r=n.length,i=n.map(di),u=n.map(yi),o=ao.range(r).sort(function(n,t){return i[n]-i[t]}),a=0,l=0,c=[],f=[];for(t=0;r>t;++t)e=o[t],l>a?(a+=u[e],c.push(e)):(l+=u[e],f.push(e));return f.reverse().concat(c)},reverse:function(n){return ao.range(n.length).reverse()},"default":gi}),wl=ao.map({silhouette:function(n){var t,e,r,i=n.length,u=n[0].length,o=[],a=0,l=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;u>e;++e)l[e]=(a-o[e])/2;return l},wiggle:function(n){var t,e,r,i,u,o,a,l,c,f=n.length,s=n[0],h=s.length,p=[];for(p[0]=l=c=0,e=1;h>e;++e){for(t=0,i=0;f>t;++t)i+=n[t][e][1];for(t=0,u=0,a=s[e][0]-s[e-1][0];f>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;u+=o*n[t][e][1]}p[e]=l-=i?u/i*a:0,c>l&&(c=l)}for(e=0;h>e;++e)p[e]-=c;return p},expand:function(n){var t,e,r,i=n.length,u=n[0].length,o=1/i,a=[];for(e=0;u>e;++e){for(t=0,r=0;i>t;t++)r+=n[t][e][1];if(r)for(t=0;i>t;t++)n[t][e][1]/=r;else for(t=0;i>t;t++)n[t][e][1]=o}for(e=0;u>e;++e)a[e]=0;return 
a},zero:vi});ao.layout.histogram=function(){function n(n,u){for(var o,a,l=[],c=n.map(e,this),f=r.call(this,c,u),s=i.call(this,f,c,u),u=-1,h=c.length,p=s.length-1,g=t?1:1/h;++u<p;)o=l[u]=[],o.dx=s[u+1]-(o.x=s[u]),o.y=0;if(p>0)for(u=-1;++u<h;)a=c[u],a>=f[0]&&a<=f[1]&&(o=l[ao.bisect(s,a,1,p)-1],o.y+=g,o.push(n[u]));return l}var t=!0,e=Number,r=bi,i=Mi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return arguments.length?(r=En(t),n):r},n.bins=function(t){return arguments.length?(i="number"==typeof t?function(n){return xi(n,t)}:En(t),n):i},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},ao.layout.pack=function(){function n(n,u){var o=e.call(this,n,u),a=o[0],l=i[0],c=i[1],f=null==t?Math.sqrt:"function"==typeof t?t:function(){return t};if(a.x=a.y=0,oi(a,function(n){n.r=+f(n.value)}),oi(a,Ni),r){var s=r*(t?1:Math.max(2*a.r/l,2*a.r/c))/2;oi(a,function(n){n.r+=s}),oi(a,Ni),oi(a,function(n){n.r-=s})}return Ci(a,l/2,c/2,t?1:1/Math.max(2*a.r/l,2*a.r/c)),o}var t,e=ao.layout.hierarchy().sort(_i),r=0,i=[1,1];return n.size=function(t){return arguments.length?(i=t,n):i},n.radius=function(e){return arguments.length?(t=null==e||"function"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},ii(n,e)},ao.layout.tree=function(){function n(n,i){var f=o.call(this,n,i),s=f[0],h=t(s);if(oi(h,e),h.parent.m=-h.z,ui(h,r),c)ui(s,u);else{var p=s,g=s,v=s;ui(s,function(n){n.x<p.x&&(p=n),n.x>g.x&&(g=n),n.depth>v.depth&&(v=n)});var d=a(p,g)/2-p.x,y=l[0]/(g.x+a(g,p)/2+d),m=l[1]/(v.depth||1);ui(s,function(n){n.x=(n.x+d)*y,n.y=n.depth*m})}return f}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var i,u=t.children,o=0,a=u.length;a>o;++o)r.push((u[o]=i={_:u[o],parent:t,children:(i=u[o].children)&&i.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=i);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Di(n);var u=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-u):n.z=u}else r&&(n.z=r.z+a(n._,r._));n.parent.A=i(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function i(n,t,e){if(t){for(var r,i=n,u=n,o=t,l=i.parent.children[0],c=i.m,f=u.m,s=o.m,h=l.m;o=Ti(o),i=qi(i),o&&i;)l=qi(l),u=Ti(u),u.a=n,r=o.z+s-i.z-c+a(o._,i._),r>0&&(Ri(Pi(o,n,e),n,r),c+=r,f+=r),s+=o.m,c+=i.m,h+=l.m,f+=u.m;o&&!Ti(u)&&(u.t=o,u.m+=s-f),i&&!qi(l)&&(l.t=i,l.m+=c-h,e=n)}return e}function u(n){n.x*=l[0],n.y=n.depth*l[1]}var o=ao.layout.hierarchy().sort(null).value(null),a=Li,l=[1,1],c=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(c=null==(l=t)?u:null,n):c?null:l},n.nodeSize=function(t){return arguments.length?(c=null==(l=t)?null:u,n):c?l:null},ii(n,o)},ao.layout.cluster=function(){function n(n,u){var o,a=t.call(this,n,u),l=a[0],c=0;oi(l,function(n){var t=n.children;t&&t.length?(n.x=ji(t),n.y=Ui(t)):(n.x=o?c+=e(n,o):0,n.y=0,o=n)});var f=Fi(l),s=Hi(l),h=f.x-e(f,s)/2,p=s.x+e(s,f)/2;return oi(l,i?function(n){n.x=(n.x-l.x)*r[0],n.y=(l.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(p-h)*r[0],n.y=(1-(l.y?n.y/l.y:1))*r[1]}),a}var t=ao.layout.hierarchy().sort(null).value(null),e=Li,r=[1,1],i=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(i=null==(r=t),n):i?null:r},n.nodeSize=function(t){return arguments.length?(i=null!=(r=t),n):i?r:null},ii(n,t)},ao.layout.treemap=function(){function n(n,t){for(var 
e,r,i=-1,u=n.length;++i<u;)r=(e=n[i]).value*(0>t?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var u=e.children;if(u&&u.length){var o,a,l,c=s(e),f=[],h=u.slice(),g=1/0,v="slice"===p?c.dx:"dice"===p?c.dy:"slice-dice"===p?1&e.depth?c.dy:c.dx:Math.min(c.dx,c.dy);for(n(h,c.dx*c.dy/e.value),f.area=0;(l=h.length)>0;)f.push(o=h[l-1]),f.area+=o.area,"squarify"!==p||(a=r(f,v))<=g?(h.pop(),g=a):(f.area-=f.pop().area,i(f,v,c,!1),v=Math.min(c.dx,c.dy),f.length=f.area=0,g=1/0);f.length&&(i(f,v,c,!0),f.length=f.area=0),u.forEach(t)}}function e(t){var r=t.children;if(r&&r.length){var u,o=s(t),a=r.slice(),l=[];for(n(a,o.dx*o.dy/t.value),l.area=0;u=a.pop();)l.push(u),l.area+=u.area,null!=u.z&&(i(l,u.z?o.dx:o.dy,o,!a.length),l.length=l.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,i=0,u=1/0,o=-1,a=n.length;++o<a;)(e=n[o].area)&&(u>e&&(u=e),e>i&&(i=e));return r*=r,t*=t,r?Math.max(t*i*g/r,r/(t*u*g)):1/0}function i(n,t,e,r){var i,u=-1,o=n.length,a=e.x,c=e.y,f=t?l(n.area/t):0;
+if(t==e.dx){for((r||f>e.dy)&&(f=e.dy);++u<o;)i=n[u],i.x=a,i.y=c,i.dy=f,a+=i.dx=Math.min(e.x+e.dx-a,f?l(i.area/f):0);i.z=!0,i.dx+=e.x+e.dx-a,e.y+=f,e.dy-=f}else{for((r||f>e.dx)&&(f=e.dx);++u<o;)i=n[u],i.x=a,i.y=c,i.dx=f,c+=i.dy=Math.min(e.y+e.dy-c,f?l(i.area/f):0);i.z=!1,i.dy+=e.y+e.dy-c,e.x+=f,e.dx-=f}}function u(r){var i=o||a(r),u=i[0];return u.x=u.y=0,u.value?(u.dx=c[0],u.dy=c[1]):u.dx=u.dy=0,o&&a.revalue(u),n([u],u.dx*u.dy/u.value),(o?e:t)(u),h&&(o=i),i}var o,a=ao.layout.hierarchy(),l=Math.round,c=[1,1],f=null,s=Oi,h=!1,p="squarify",g=.5*(1+Math.sqrt(5));return u.size=function(n){return arguments.length?(c=n,u):c},u.padding=function(n){function t(t){var e=n.call(u,t,t.depth);return null==e?Oi(t):Ii(t,"number"==typeof e?[e,e,e,e]:e)}function e(t){return Ii(t,n)}if(!arguments.length)return f;var r;return s=null==(f=n)?Oi:"function"==(r=typeof n)?t:"number"===r?(n=[n,n,n,n],e):e,u},u.round=function(n){return arguments.length?(l=n?Math.round:Number,u):l!=Number},u.sticky=function(n){return arguments.length?(h=n,o=null,u):h},u.ratio=function(n){return arguments.length?(g=n,u):g},u.mode=function(n){return arguments.length?(p=n+"",u):p},ii(u,a)},ao.random={normal:function(n,t){var e=arguments.length;return 2>e&&(t=1),1>e&&(n=0),function(){var e,r,i;do e=2*Math.random()-1,r=2*Math.random()-1,i=e*e+r*r;while(!i||i>1);return n+t*e*Math.sqrt(-2*Math.log(i)/i)}},logNormal:function(){var n=ao.random.normal.apply(ao,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=ao.random.irwinHall(n);return function(){return t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},ao.scale={};var Sl={floor:m,ceil:m};ao.scale.linear=function(){return Wi([0,1],[0,1],Mr,!1)};var kl={s:1,g:1,p:1,r:1,e:1};ao.scale.log=function(){return ru(ao.scale.linear().domain([0,1]),10,!0,[1,10])};var Nl=ao.format(".0e"),El={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};ao.scale.pow=function(){return iu(ao.scale.linear(),1,[0,1])},ao.scale.sqrt=function(){return ao.scale.pow().exponent(.5)},ao.scale.ordinal=function(){return ou([],{t:"range",a:[[]]})},ao.scale.category10=function(){return ao.scale.ordinal().range(Al)},ao.scale.category20=function(){return ao.scale.ordinal().range(Cl)},ao.scale.category20b=function(){return ao.scale.ordinal().range(zl)},ao.scale.category20c=function(){return ao.scale.ordinal().range(Ll)};var Al=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(xn),Cl=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(xn),zl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(xn),Ll=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(xn);ao.scale.quantile=function(){return au([],[])},ao.scale.quantize=function(){return lu(0,1,[0,1])},ao.scale.threshold=function(){return cu([.5],[0,1])},ao.scale.identity=function(){return fu([0,1])},ao.svg={},ao.svg.arc=function(){function n(){var n=Math.max(0,+e.apply(this,arguments)),c=Math.max(0,+r.apply(this,arguments)),f=o.apply(this,arguments)-Io,s=a.apply(this,arguments)-Io,h=Math.abs(s-f),p=f>s?0:1;if(n>c&&(g=c,c=n,n=g),h>=Oo)return 
t(c,p)+(n?t(n,1-p):"")+"Z";var g,v,d,y,m,M,x,b,_,w,S,k,N=0,E=0,A=[];if((y=(+l.apply(this,arguments)||0)/2)&&(d=u===ql?Math.sqrt(n*n+c*c):+u.apply(this,arguments),p||(E*=-1),c&&(E=tn(d/c*Math.sin(y))),n&&(N=tn(d/n*Math.sin(y)))),c){m=c*Math.cos(f+E),M=c*Math.sin(f+E),x=c*Math.cos(s-E),b=c*Math.sin(s-E);var C=Math.abs(s-f-2*E)<=Fo?0:1;if(E&&yu(m,M,x,b)===p^C){var z=(f+s)/2;m=c*Math.cos(z),M=c*Math.sin(z),x=b=null}}else m=M=0;if(n){_=n*Math.cos(s-N),w=n*Math.sin(s-N),S=n*Math.cos(f+N),k=n*Math.sin(f+N);var L=Math.abs(f-s+2*N)<=Fo?0:1;if(N&&yu(_,w,S,k)===1-p^L){var q=(f+s)/2;_=n*Math.cos(q),w=n*Math.sin(q),S=k=null}}else _=w=0;if(h>Uo&&(g=Math.min(Math.abs(c-n)/2,+i.apply(this,arguments)))>.001){v=c>n^p?0:1;var T=g,R=g;if(Fo>h){var D=null==S?[_,w]:null==x?[m,M]:Re([m,M],[S,k],[x,b],[_,w]),P=m-D[0],U=M-D[1],j=x-D[0],F=b-D[1],H=1/Math.sin(Math.acos((P*j+U*F)/(Math.sqrt(P*P+U*U)*Math.sqrt(j*j+F*F)))/2),O=Math.sqrt(D[0]*D[0]+D[1]*D[1]);R=Math.min(g,(n-O)/(H-1)),T=Math.min(g,(c-O)/(H+1))}if(null!=x){var I=mu(null==S?[_,w]:[S,k],[m,M],c,T,p),Y=mu([x,b],[_,w],c,T,p);g===T?A.push("M",I[0],"A",T,",",T," 0 0,",v," ",I[1],"A",c,",",c," 0 ",1-p^yu(I[1][0],I[1][1],Y[1][0],Y[1][1]),",",p," ",Y[1],"A",T,",",T," 0 0,",v," ",Y[0]):A.push("M",I[0],"A",T,",",T," 0 1,",v," ",Y[0])}else A.push("M",m,",",M);if(null!=S){var Z=mu([m,M],[S,k],n,-R,p),V=mu([_,w],null==x?[m,M]:[x,b],n,-R,p);g===R?A.push("L",V[0],"A",R,",",R," 0 0,",v," ",V[1],"A",n,",",n," 0 ",p^yu(V[1][0],V[1][1],Z[1][0],Z[1][1]),",",1-p," ",Z[1],"A",R,",",R," 0 0,",v," ",Z[0]):A.push("L",V[0],"A",R,",",R," 0 0,",v," ",Z[0])}else A.push("L",_,",",w)}else A.push("M",m,",",M),null!=x&&A.push("A",c,",",c," 0 ",C,",",p," ",x,",",b),A.push("L",_,",",w),null!=S&&A.push("A",n,",",n," 0 ",L,",",1-p," ",S,",",k);return A.push("Z"),A.join("")}function t(n,t){return"M0,"+n+"A"+n+","+n+" 0 1,"+t+" 0,"+-n+"A"+n+","+n+" 0 1,"+t+" 0,"+n}var e=hu,r=pu,i=su,u=ql,o=gu,a=vu,l=du;return n.innerRadius=function(t){return arguments.length?(e=En(t),n):e},n.outerRadius=function(t){return arguments.length?(r=En(t),n):r},n.cornerRadius=function(t){return arguments.length?(i=En(t),n):i},n.padRadius=function(t){return arguments.length?(u=t==ql?ql:En(t),n):u},n.startAngle=function(t){return arguments.length?(o=En(t),n):o},n.endAngle=function(t){return arguments.length?(a=En(t),n):a},n.padAngle=function(t){return arguments.length?(l=En(t),n):l},n.centroid=function(){var n=(+e.apply(this,arguments)+ +r.apply(this,arguments))/2,t=(+o.apply(this,arguments)+ +a.apply(this,arguments))/2-Io;return[Math.cos(t)*n,Math.sin(t)*n]},n};var ql="auto";ao.svg.line=function(){return Mu(m)};var Tl=ao.map({linear:xu,"linear-closed":bu,step:_u,"step-before":wu,"step-after":Su,basis:zu,"basis-open":Lu,"basis-closed":qu,bundle:Tu,cardinal:Eu,"cardinal-open":ku,"cardinal-closed":Nu,monotone:Fu});Tl.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Rl=[0,2/3,1/3,0],Dl=[0,1/3,2/3,0],Pl=[0,1/6,2/3,1/6];ao.svg.line.radial=function(){var n=Mu(Hu);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},wu.reverse=Su,Su.reverse=wu,ao.svg.area=function(){return Ou(m)},ao.svg.area.radial=function(){var n=Ou(Hu);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},ao.svg.chord=function(){function n(n,a){var 
l=t(this,u,n,a),c=t(this,o,n,a);return"M"+l.p0+r(l.r,l.p1,l.a1-l.a0)+(e(l,c)?i(l.r,l.p1,l.r,l.p0):i(l.r,l.p1,c.r,c.p0)+r(c.r,c.p1,c.a1-c.a0)+i(c.r,c.p1,l.r,l.p0))+"Z"}function t(n,t,e,r){var i=t.call(n,e,r),u=a.call(n,i,r),o=l.call(n,i,r)-Io,f=c.call(n,i,r)-Io;return{r:u,a0:o,a1:f,p0:[u*Math.cos(o),u*Math.sin(o)],p1:[u*Math.cos(f),u*Math.sin(f)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return"A"+n+","+n+" 0 "+ +(e>Fo)+",1 "+t}function i(n,t,e,r){return"Q 0,0 "+r}var u=Me,o=xe,a=Iu,l=gu,c=vu;return n.radius=function(t){return arguments.length?(a=En(t),n):a},n.source=function(t){return arguments.length?(u=En(t),n):u},n.target=function(t){return arguments.length?(o=En(t),n):o},n.startAngle=function(t){return arguments.length?(l=En(t),n):l},n.endAngle=function(t){return arguments.length?(c=En(t),n):c},n},ao.svg.diagonal=function(){function n(n,i){var u=t.call(this,n,i),o=e.call(this,n,i),a=(u.y+o.y)/2,l=[u,{x:u.x,y:a},{x:o.x,y:a},o];return l=l.map(r),"M"+l[0]+"C"+l[1]+" "+l[2]+" "+l[3]}var t=Me,e=xe,r=Yu;return n.source=function(e){return arguments.length?(t=En(e),n):t},n.target=function(t){return arguments.length?(e=En(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},ao.svg.diagonal.radial=function(){var n=ao.svg.diagonal(),t=Yu,e=n.projection;return n.projection=function(n){return arguments.length?e(Zu(t=n)):t},n},ao.svg.symbol=function(){function n(n,r){return(Ul.get(t.call(this,n,r))||$u)(e.call(this,n,r))}var t=Xu,e=Vu;return n.type=function(e){return arguments.length?(t=En(e),n):t},n.size=function(t){return arguments.length?(e=En(t),n):e},n};var Ul=ao.map({circle:$u,cross:function(n){var t=Math.sqrt(n/5)/2;return"M"+-3*t+","+-t+"H"+-t+"V"+-3*t+"H"+t+"V"+-t+"H"+3*t+"V"+t+"H"+t+"V"+3*t+"H"+-t+"V"+t+"H"+-3*t+"Z"},diamond:function(n){var t=Math.sqrt(n/(2*Fl)),e=t*Fl;return"M0,"+-t+"L"+e+",0 0,"+t+" "+-e+",0Z"},square:function(n){var t=Math.sqrt(n)/2;return"M"+-t+","+-t+"L"+t+","+-t+" "+t+","+t+" "+-t+","+t+"Z"},"triangle-down":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return"M0,"+e+"L"+t+","+-e+" "+-t+","+-e+"Z"},"triangle-up":function(n){var t=Math.sqrt(n/jl),e=t*jl/2;return"M0,"+-e+"L"+t+","+e+" "+-t+","+e+"Z"}});ao.svg.symbolTypes=Ul.keys();var jl=Math.sqrt(3),Fl=Math.tan(30*Yo);Co.transition=function(n){for(var t,e,r=Hl||++Zl,i=Ku(n),u=[],o=Ol||{time:Date.now(),ease:Nr,delay:0,duration:250},a=-1,l=this.length;++a<l;){u.push(t=[]);for(var c=this[a],f=-1,s=c.length;++f<s;)(e=c[f])&&Qu(e,f,i,r,o),t.push(e)}return Wu(u,i,r)},Co.interrupt=function(n){return this.each(null==n?Il:Bu(Ku(n)))};var Hl,Ol,Il=Bu(Ku()),Yl=[],Zl=0;Yl.call=Co.call,Yl.empty=Co.empty,Yl.node=Co.node,Yl.size=Co.size,ao.transition=function(n,t){return n&&n.transition?Hl?n.transition(t):n:ao.selection().transition(n)},ao.transition.prototype=Yl,Yl.select=function(n){var t,e,r,i=this.id,u=this.namespace,o=[];n=A(n);for(var a=-1,l=this.length;++a<l;){o.push(t=[]);for(var c=this[a],f=-1,s=c.length;++f<s;)(r=c[f])&&(e=n.call(r,r.__data__,f,a))?("__data__"in r&&(e.__data__=r.__data__),Qu(e,f,u,i,r[u][i]),t.push(e)):t.push(null)}return Wu(o,u,i)},Yl.selectAll=function(n){var t,e,r,i,u,o=this.id,a=this.namespace,l=[];n=C(n);for(var c=-1,f=this.length;++c<f;)for(var s=this[c],h=-1,p=s.length;++h<p;)if(r=s[h]){u=r[a][o],e=n.call(r,r.__data__,h,c),l.push(t=[]);for(var g=-1,v=e.length;++g<v;)(i=e[g])&&Qu(i,g,a,o,u),t.push(i)}return Wu(l,a,o)},Yl.filter=function(n){var t,e,r,i=[];"function"!=typeof n&&(n=O(n));for(var u=0,o=this.length;o>u;u++){i.push(t=[]);for(var 
e=this[u],a=0,l=e.length;l>a;a++)(r=e[a])&&n.call(r,r.__data__,a,u)&&t.push(r)}return Wu(i,this.namespace,this.id)},Yl.tween=function(n,t){var e=this.id,r=this.namespace;return arguments.length<2?this.node()[r][e].tween.get(n):Y(this,null==t?function(t){t[r][e].tween.remove(n)}:function(i){i[r][e].tween.set(n,t)})},Yl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function i(n){return null==n?e:(n+="",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function u(n){return null==n?r:(n+="",function(){var t,e=this.getAttributeNS(a.space,a.local);return e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o="transform"==n?$r:Mr,a=ao.ns.qualify(n);return Ju(this,"attr."+n,t,a.local?u:i)},Yl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(i));return r&&function(n){this.setAttribute(i,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(i.space,i.local));return r&&function(n){this.setAttributeNS(i.space,i.local,r(n))}}var i=ao.ns.qualify(n);return this.tween("attr."+n,i.local?r:e)},Yl.style=function(n,e,r){function i(){this.style.removeProperty(n)}function u(e){return null==e?i:(e+="",function(){var i,u=t(this).getComputedStyle(this,null).getPropertyValue(n);return u!==e&&(i=Mr(u,e),function(t){this.style.setProperty(n,i(t),r)})})}var o=arguments.length;if(3>o){if("string"!=typeof n){2>o&&(e="");for(r in n)this.style(r,n[r],e);return this}r=""}return Ju(this,"style."+n,e,u)},Yl.styleTween=function(n,e,r){function i(i,u){var o=e.call(this,i,u,t(this).getComputedStyle(this,null).getPropertyValue(n));return o&&function(t){this.style.setProperty(n,o(t),r)}}return arguments.length<3&&(r=""),this.tween("style."+n,i)},Yl.text=function(n){return Ju(this,"text",n,Gu)},Yl.remove=function(){var n=this.namespace;return this.each("end.transition",function(){var t;this[n].count<2&&(t=this.parentNode)&&t.removeChild(this)})},Yl.ease=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].ease:("function"!=typeof n&&(n=ao.ease.apply(ao,arguments)),Y(this,function(r){r[e][t].ease=n}))},Yl.delay=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].delay:Y(this,"function"==typeof n?function(r,i,u){r[e][t].delay=+n.call(r,r.__data__,i,u)}:(n=+n,function(r){r[e][t].delay=n}))},Yl.duration=function(n){var t=this.id,e=this.namespace;return arguments.length<1?this.node()[e][t].duration:Y(this,"function"==typeof n?function(r,i,u){r[e][t].duration=Math.max(1,n.call(r,r.__data__,i,u))}:(n=Math.max(1,n),function(r){r[e][t].duration=n}))},Yl.each=function(n,t){var e=this.id,r=this.namespace;if(arguments.length<2){var i=Ol,u=Hl;try{Hl=e,Y(this,function(t,i,u){Ol=t[r][e],n.call(t,t.__data__,i,u)})}finally{Ol=i,Hl=u}}else Y(this,function(i){var u=i[r][e];(u.event||(u.event=ao.dispatch("start","end","interrupt"))).on(n,t)});return this},Yl.transition=function(){for(var n,t,e,r,i=this.id,u=++Zl,o=this.namespace,a=[],l=0,c=this.length;c>l;l++){a.push(n=[]);for(var t=this[l],f=0,s=t.length;s>f;f++)(e=t[f])&&(r=e[o][i],Qu(e,f,o,u,{time:r.time,ease:r.ease,delay:r.delay+r.duration,duration:r.duration})),n.push(e)}return Wu(a,o,u)},ao.svg.axis=function(){function n(n){n.each(function(){var 
n,c=ao.select(this),f=this.__chart__||e,s=this.__chart__=e.copy(),h=null==l?s.ticks?s.ticks.apply(s,a):s.domain():l,p=null==t?s.tickFormat?s.tickFormat.apply(s,a):m:t,g=c.selectAll(".tick").data(h,s),v=g.enter().insert("g",".domain").attr("class","tick").style("opacity",Uo),d=ao.transition(g.exit()).style("opacity",Uo).remove(),y=ao.transition(g.order()).style("opacity",1),M=Math.max(i,0)+o,x=Zi(s),b=c.selectAll(".domain").data([0]),_=(b.enter().append("path").attr("class","domain"),ao.transition(b));v.append("line"),v.append("text");var w,S,k,N,E=v.select("line"),A=y.select("line"),C=g.select("text").text(p),z=v.select("text"),L=y.select("text"),q="top"===r||"left"===r?-1:1;if("bottom"===r||"top"===r?(n=no,w="x",k="y",S="x2",N="y2",C.attr("dy",0>q?"0em":".71em").style("text-anchor","middle"),_.attr("d","M"+x[0]+","+q*u+"V0H"+x[1]+"V"+q*u)):(n=to,w="y",k="x",S="y2",N="x2",C.attr("dy",".32em").style("text-anchor",0>q?"end":"start"),_.attr("d","M"+q*u+","+x[0]+"H0V"+x[1]+"H"+q*u)),E.attr(N,q*i),z.attr(k,q*M),A.attr(S,0).attr(N,q*i),L.attr(w,0).attr(k,q*M),s.rangeBand){var T=s,R=T.rangeBand()/2;f=s=function(n){return T(n)+R}}else f.rangeBand?f=s:d.call(n,s,f);v.call(n,f,s),y.call(n,s,s)})}var t,e=ao.scale.linear(),r=Vl,i=6,u=6,o=3,a=[10],l=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Xl?t+"":Vl,n):r},n.ticks=function(){return arguments.length?(a=co(arguments),n):a},n.tickValues=function(t){return arguments.length?(l=t,n):l},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(i=+t,u=+arguments[e-1],n):i},n.innerTickSize=function(t){return arguments.length?(i=+t,n):i},n.outerTickSize=function(t){return arguments.length?(u=+t,n):u},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var Vl="bottom",Xl={top:1,right:1,bottom:1,left:1};ao.svg.brush=function(){function n(t){t.each(function(){var t=ao.select(this).style("pointer-events","all").style("-webkit-tap-highlight-color","rgba(0,0,0,0)").on("mousedown.brush",u).on("touchstart.brush",u),o=t.selectAll(".background").data([0]);o.enter().append("rect").attr("class","background").style("visibility","hidden").style("cursor","crosshair"),t.selectAll(".extent").data([0]).enter().append("rect").attr("class","extent").style("cursor","move");var a=t.selectAll(".resize").data(v,m);a.exit().remove(),a.enter().append("g").attr("class",function(n){return"resize "+n}).style("cursor",function(n){return $l[n]}).append("rect").attr("x",function(n){return/[ew]$/.test(n)?-3:null}).attr("y",function(n){return/^[ns]/.test(n)?-3:null}).attr("width",6).attr("height",6).style("visibility","hidden"),a.style("display",n.empty()?"none":null);var l,s=ao.transition(t),h=ao.transition(o);c&&(l=Zi(c),h.attr("x",l[0]).attr("width",l[1]-l[0]),r(s)),f&&(l=Zi(f),h.attr("y",l[0]).attr("height",l[1]-l[0]),i(s)),e(s)})}function e(n){n.selectAll(".resize").attr("transform",function(n){return"translate("+s[+/e$/.test(n)]+","+h[+/^s/.test(n)]+")"})}function r(n){n.select(".extent").attr("x",s[0]),n.selectAll(".extent,.n>rect,.s>rect").attr("width",s[1]-s[0])}function i(n){n.select(".extent").attr("y",h[0]),n.selectAll(".extent,.e>rect,.w>rect").attr("height",h[1]-h[0])}function u(){function u(){32==ao.event.keyCode&&(C||(M=null,L[0]-=s[1],L[1]-=h[1],C=2),S())}function v(){32==ao.event.keyCode&&2==C&&(L[0]+=s[1],L[1]+=h[1],C=0,S())}function d(){var 
n=ao.mouse(b),t=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(ao.event.altKey?(M||(M=[(s[0]+s[1])/2,(h[0]+h[1])/2]),L[0]=s[+(n[0]<M[0])],L[1]=h[+(n[1]<M[1])]):M=null),E&&y(n,c,0)&&(r(k),t=!0),A&&y(n,f,1)&&(i(k),t=!0),t&&(e(k),w({type:"brush",mode:C?"move":"resize"}))}function y(n,t,e){var r,i,u=Zi(t),l=u[0],c=u[1],f=L[e],v=e?h:s,d=v[1]-v[0];return C&&(l-=f,c-=d+f),r=(e?g:p)?Math.max(l,Math.min(c,n[e])):n[e],C?i=(r+=f)+d:(M&&(f=Math.max(l,Math.min(c,2*M[e]-r))),r>f?(i=r,r=f):i=f),v[0]!=r||v[1]!=i?(e?a=null:o=null,v[0]=r,v[1]=i,!0):void 0}function m(){d(),k.style("pointer-events","all").selectAll(".resize").style("display",n.empty()?"none":null),ao.select("body").style("cursor",null),q.on("mousemove.brush",null).on("mouseup.brush",null).on("touchmove.brush",null).on("touchend.brush",null).on("keydown.brush",null).on("keyup.brush",null),z(),w({type:"brushend"})}var M,x,b=this,_=ao.select(ao.event.target),w=l.of(b,arguments),k=ao.select(b),N=_.datum(),E=!/^(n|s)$/.test(N)&&c,A=!/^(e|w)$/.test(N)&&f,C=_.classed("extent"),z=W(b),L=ao.mouse(b),q=ao.select(t(b)).on("keydown.brush",u).on("keyup.brush",v);if(ao.event.changedTouches?q.on("touchmove.brush",d).on("touchend.brush",m):q.on("mousemove.brush",d).on("mouseup.brush",m),k.interrupt().selectAll("*").interrupt(),C)L[0]=s[0]-L[0],L[1]=h[0]-L[1];else if(N){var T=+/w$/.test(N),R=+/^n/.test(N);x=[s[1-T]-L[0],h[1-R]-L[1]],L[0]=s[T],L[1]=h[R]}else ao.event.altKey&&(M=L.slice());k.style("pointer-events","none").selectAll(".resize").style("display",null),ao.select("body").style("cursor",_.style("cursor")),w({type:"brushstart"}),d()}var o,a,l=N(n,"brushstart","brush","brushend"),c=null,f=null,s=[0,0],h=[0,0],p=!0,g=!0,v=Bl[0];return n.event=function(n){n.each(function(){var n=l.of(this,arguments),t={x:s,y:h,i:o,j:a},e=this.__chart__||t;this.__chart__=t,Hl?ao.select(this).transition().each("start.brush",function(){o=e.i,a=e.j,s=e.x,h=e.y,n({type:"brushstart"})}).tween("brush:brush",function(){var e=xr(s,t.x),r=xr(h,t.y);return o=a=null,function(i){s=t.x=e(i),h=t.y=r(i),n({type:"brush",mode:"resize"})}}).each("end.brush",function(){o=t.i,a=t.j,n({type:"brush",mode:"resize"}),n({type:"brushend"})}):(n({type:"brushstart"}),n({type:"brush",mode:"resize"}),n({type:"brushend"}))})},n.x=function(t){return arguments.length?(c=t,v=Bl[!c<<1|!f],n):c},n.y=function(t){return arguments.length?(f=t,v=Bl[!c<<1|!f],n):f},n.clamp=function(t){return arguments.length?(c&&f?(p=!!t[0],g=!!t[1]):c?p=!!t:f&&(g=!!t),n):c&&f?[p,g]:c?p:f?g:null},n.extent=function(t){var e,r,i,u,l;return arguments.length?(c&&(e=t[0],r=t[1],f&&(e=e[0],r=r[0]),o=[e,r],c.invert&&(e=c(e),r=c(r)),e>r&&(l=e,e=r,r=l),e==s[0]&&r==s[1]||(s=[e,r])),f&&(i=t[0],u=t[1],c&&(i=i[1],u=u[1]),a=[i,u],f.invert&&(i=f(i),u=f(u)),i>u&&(l=i,i=u,u=l),i==h[0]&&u==h[1]||(h=[i,u])),n):(c&&(o?(e=o[0],r=o[1]):(e=s[0],r=s[1],c.invert&&(e=c.invert(e),r=c.invert(r)),e>r&&(l=e,e=r,r=l))),f&&(a?(i=a[0],u=a[1]):(i=h[0],u=h[1],f.invert&&(i=f.invert(i),u=f.invert(u)),i>u&&(l=i,i=u,u=l))),c&&f?[[e,i],[r,u]]:c?[e,r]:f&&[i,u])},n.clear=function(){return n.empty()||(s=[0,0],h=[0,0],o=a=null),n},n.empty=function(){return!!c&&s[0]==s[1]||!!f&&h[0]==h[1]},ao.rebind(n,l,"on")};var $l={n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},Bl=[["n","e","s","w","nw","ne","se","sw"],["e","w"],["n","s"],[]],Wl=ga.format=xa.timeFormat,Jl=Wl.utc,Gl=Jl("%Y-%m-%dT%H:%M:%S.%LZ");Wl.iso=Date.prototype.toISOString&&+new Date("2000-01-01T00:00:00.000Z")?eo:Gl,eo.parse=function(n){var t=new 
Date(n);return isNaN(t)?null:t},eo.toString=Gl.toString,ga.second=On(function(n){return new va(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),ga.seconds=ga.second.range,ga.seconds.utc=ga.second.utc.range,ga.minute=On(function(n){return new va(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),ga.minutes=ga.minute.range,ga.minutes.utc=ga.minute.utc.range,ga.hour=On(function(n){var t=n.getTimezoneOffset()/60;return new va(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),ga.hours=ga.hour.range,ga.hours.utc=ga.hour.utc.range,ga.month=On(function(n){return n=ga.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),ga.months=ga.month.range,ga.months.utc=ga.month.utc.range;var Kl=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Ql=[[ga.second,1],[ga.second,5],[ga.second,15],[ga.second,30],[ga.minute,1],[ga.minute,5],[ga.minute,15],[ga.minute,30],[ga.hour,1],[ga.hour,3],[ga.hour,6],[ga.hour,12],[ga.day,1],[ga.day,2],[ga.week,1],[ga.month,1],[ga.month,3],[ga.year,1]],nc=Wl.multi([[".%L",function(n){return n.getMilliseconds()}],[":%S",function(n){return n.getSeconds()}],["%I:%M",function(n){return n.getMinutes()}],["%I %p",function(n){return n.getHours()}],["%a %d",function(n){return n.getDay()&&1!=n.getDate()}],["%b %d",function(n){return 1!=n.getDate()}],["%B",function(n){return n.getMonth()}],["%Y",zt]]),tc={range:function(n,t,e){return ao.range(Math.ceil(n/e)*e,+t,e).map(io)},floor:m,ceil:m};Ql.year=ga.year,ga.scale=function(){return ro(ao.scale.linear(),Ql,nc)};var ec=Ql.map(function(n){return[n[0].utc,n[1]]}),rc=Jl.multi([[".%L",function(n){return n.getUTCMilliseconds()}],[":%S",function(n){return n.getUTCSeconds()}],["%I:%M",function(n){return n.getUTCMinutes()}],["%I %p",function(n){return n.getUTCHours()}],["%a %d",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],["%b %d",function(n){return 1!=n.getUTCDate()}],["%B",function(n){return n.getUTCMonth()}],["%Y",zt]]);ec.year=ga.year.utc,ga.scale.utc=function(){return ro(ao.scale.linear(),ec,rc)},ao.text=An(function(n){return n.responseText}),ao.json=function(n,t){return Cn(n,"application/json",uo,t)},ao.html=function(n,t){return Cn(n,"text/html",oo,t)},ao.xml=An(function(n){return n.responseXML}),"function"==typeof define&&define.amd?(this.d3=ao,define(ao)):"object"==typeof module&&module.exports?module.exports=ao:this.d3=ao}();
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/dfs-dust.js b/hadoop-hdds/framework/src/main/resources/webapps/static/dfs-dust.js
new file mode 100644
index 0000000..c7af6a1
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/dfs-dust.js
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function ($, dust, exports) {
+  "use strict";
+
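+  // These helpers are registered as Dust.js filters (via the
+  // $.extend(dust.filters, filters) call below), so templates can apply them
+  // with the pipe syntax, e.g. {capacity|fmt_bytes} or {used|fmt_percentage};
+  // the key names in these examples are illustrative.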
+  var filters = {
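+    // e.g. fmt_bytes(1536) returns "1.5 KB" (the value is repeatedly divided by 1024).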
+    'fmt_bytes': function (v) {
+      var UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB'];
+      var prev = 0, i = 0;
+      while (Math.floor(v) > 0 && i < UNITS.length) {
+        prev = v;
+        v /= 1024;
+        i += 1;
+      }
+
+      if (i > 0 && i < UNITS.length) {
+        v = prev;
+        i -= 1;
+      }
+      return Math.round(v * 100) / 100 + ' ' + UNITS[i];
+    },
+
+    'fmt_percentage': function (v) {
+      return Math.round(v * 100) / 100 + '%';
+    },
+    'elapsed': function (v) {
+      // elapsed time in milliseconds since the given epoch-seconds timestamp
+      return Date.now() - v * 1000;
+    },
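+    // e.g. fmt_time(3723000) returns "1 hrs, 2 mins, 3 sec" (input is in milliseconds)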
+    'fmt_time': function (v) {
+      var s = Math.floor(v / 1000), h = Math.floor(s / 3600);
+      s -= h * 3600;
+      var m = Math.floor(s / 60);
+      s -= m * 60;
+
+      var res = s + " sec";
+      if (m !== 0) {
+        res = m + " mins, " + res;
+      }
+
+      if (h !== 0) {
+        res = h + " hrs, " + res;
+      }
+
+      return res;
+    },
+
+    'date_tostring' : function (v) {
+      return moment(Number(v)).format('ddd MMM DD HH:mm:ss ZZ YYYY');
+    },
+
+    'format_compile_info' : function (v) {
+      var info = v.split(" by ");
+      var date = moment(info[0]).format('ddd MMM DD HH:mm:ss ZZ YYYY');
+      return date.concat(" by ").concat(info[1]);
+    },
+
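+    // e.g. helper_to_permission("755") returns "rwxr-xr-x"; a leading fourth octal
+    // digit, as in "1755", sets the sticky bit and yields "rwxr-xr-t".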
+    'helper_to_permission': function (v) {
+      var symbols = [ '---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx' ];
+      var vInt = parseInt(v, 8);
+      var sticky = (vInt & (1 << 9)) != 0;
+
+      var res = "";
+      for (var i = 0; i < 3; ++i) {
+        res = symbols[(v % 10)] + res;
+        v = Math.floor(v / 10);
+      }
+
+      if (sticky) {
+        var otherExec = (vInt & 1) == 1;
+        res = res.substr(0, res.length - 1) + (otherExec ? 't' : 'T');
+      }
+
+      return res;
+    },
+
+    'helper_to_directory' : function (v) {
+      return v === 'DIRECTORY' ? 'd' : '-';
+    },
+
+    'helper_to_acl_bit': function (v) {
+      return v ? '+' : "";
+    },
+
+    'fmt_number': function (v) {
+      return v.toLocaleString();
+    }
+  };
+  $.extend(dust.filters, filters);
+
+  /**
+   * Load a sequence of JSON.
+   *
+   * beans is an array of objects of the form {url, name}.
+   */
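+  // For example (the URLs here are illustrative), beans could be
+  //   [{url: '/jmx?qry=java.lang:type=Memory',  name: 'mem'},
+  //    {url: '/jmx?qry=java.lang:type=Runtime', name: 'rt'}]
+  // and success_cb would then be invoked once with {mem: <resp>, rt: <resp>}.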
+  function load_json(beans, success_cb, error_cb) {
+    var data = {}, error = false, to_be_completed = beans.length;
+
+    $.each(beans, function(idx, b) {
+      if (error) {
+        return false;
+      }
+      $.get(b.url, function (resp) {
+        data[b.name] = resp;
+        to_be_completed -= 1;
+        if (to_be_completed === 0) {
+          success_cb(data);
+        }
+      }).error(function (jqxhr, text, err) {
+        error = true;
+        error_cb(b.url, jqxhr, text, err);
+      });
+    });
+  }
+
+  exports.load_json = load_json;
+
+}($, dust, window));
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css
new file mode 100644
index 0000000..b8a5c0f
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css
@@ -0,0 +1,2 @@
+.nvd3 .nv-axis line,.nvd3 .nv-axis path{fill:none;shape-rendering:crispEdges}.nv-brush .extent,.nvd3 .background path,.nvd3 .nv-axis line,.nvd3 .nv-axis path{shape-rendering:crispEdges}.nv-distx,.nv-disty,.nv-noninteractive,.nvd3 .nv-axis,.nvd3.nv-pie .nv-label,.nvd3.nv-sparklineplus g.nv-hoverValue{pointer-events:none}.nvd3 .nv-axis{opacity:1}.nvd3 .nv-axis.nv-disabled,.nvd3 .nv-controlsWrap .nv-legend .nv-check-box .nv-check{opacity:0}.nvd3 .nv-axis path{stroke:#000;stroke-opacity:.75}.nvd3 .nv-axis path.domain{stroke-opacity:.75}.nvd3 .nv-axis.nv-x path.domain{stroke-opacity:0}.nvd3 .nv-axis line{stroke:#e5e5e5}.nvd3 .nv-axis .zero line, .nvd3 .nv-axis line.zero{stroke-opacity:.75}.nvd3 .nv-axis .nv-axisMaxMin text{font-weight:700}.nvd3 .x .nv-axis .nv-axisMaxMin text,.nvd3 .x2 .nv-axis .nv-axisMaxMin text,.nvd3 .x3 .nv-axis .nv-axisMaxMin text{text-anchor:middle}.nvd3 .nv-bars rect{fill-opacity:.75;transition:fill-opacity 250ms linear}.nvd3 .nv-bars rect.hover{fill-opacity:1}.nvd3 .nv-bars .hover rect{fill:#add8e6}.nvd3 .nv-bars text{fill:transparent}.nvd3 .nv-bars .hover text{fill:rgba(0,0,0,1)}.nvd3 .nv-discretebar .nv-groups rect,.nvd3 .nv-multibar .nv-groups rect,.nvd3 .nv-multibarHorizontal .nv-groups rect{stroke-opacity:0;transition:fill-opacity 250ms linear}.with-transitions .nv-candlestickBar .nv-ticks .nv-tick,.with-transitions .nvd3 .nv-groups .nv-point{transition:stroke-width 250ms linear,stroke-opacity 250ms linear}.nvd3 .nv-candlestickBar .nv-ticks rect:hover,.nvd3 .nv-discretebar .nv-groups rect:hover,.nvd3 .nv-multibar .nv-groups rect:hover,.nvd3 .nv-multibarHorizontal .nv-groups rect:hover{fill-opacity:1}.nvd3 .nv-discretebar .nv-groups text,.nvd3 .nv-multibarHorizontal .nv-groups text{font-weight:700;fill:rgba(0,0,0,1);stroke:transparent}.nvd3 .nv-boxplot circle{fill-opacity:.5}.nvd3 .nv-boxplot circle:hover,.nvd3 .nv-boxplot rect:hover{fill-opacity:1}.nvd3 line.nv-boxplot-median{stroke:#000}.nv-boxplot-tick:hover{stroke-width:2.5px}.nvd3.nv-bullet{font:10px sans-serif}.nvd3.nv-bullet .nv-measure{fill-opacity:.8}.nvd3.nv-bullet .nv-measure:hover{fill-opacity:1}.nvd3.nv-bullet .nv-marker{stroke:#000;stroke-width:2px}.nvd3.nv-bullet .nv-markerTriangle{stroke:#000;fill:#fff;stroke-width:1.5px}.nvd3.nv-bullet .nv-markerLine{stroke:#000;stroke-width:1.5px}.nvd3.nv-bullet .nv-tick line{stroke:#666;stroke-width:.5px}.nvd3.nv-bullet .nv-range.nv-s0{fill:#eee}.nvd3.nv-bullet .nv-range.nv-s1{fill:#ddd}.nvd3.nv-bullet .nv-range.nv-s2{fill:#ccc}.nvd3.nv-bullet .nv-title{font-size:14px;font-weight:700}.nvd3.nv-bullet .nv-subtitle{fill:#999}.nvd3.nv-bullet .nv-range{fill:#bababa;fill-opacity:.4}.nvd3.nv-bullet .nv-range:hover{fill-opacity:.7}.nvd3.nv-candlestickBar .nv-ticks .nv-tick{stroke-width:1px}.nvd3.nv-candlestickBar .nv-ticks .nv-tick.hover{stroke-width:2px}.nvd3.nv-candlestickBar .nv-ticks .nv-tick.positive rect{stroke:#2ca02c;fill:#2ca02c}.nvd3.nv-candlestickBar .nv-ticks .nv-tick.negative rect{stroke:#d62728;fill:#d62728}.nvd3.nv-candlestickBar .nv-ticks line{stroke:#333}.nv-force-node{stroke:#fff;stroke-width:1.5px}.nv-force-link{stroke:#999;stroke-opacity:.6}.nv-force-node text{stroke-width:0}.nvd3 .nv-check-box .nv-box{fill-opacity:0;stroke-width:2}.nvd3 .nv-check-box .nv-check{fill-opacity:0;stroke-width:4}.nvd3 .nv-series.nv-disabled .nv-check-box .nv-check{fill-opacity:0;stroke-opacity:0}.nvd3.nv-linePlusBar .nv-bar rect{fill-opacity:.75}.nvd3.nv-linePlusBar .nv-bar rect:hover{fill-opacity:1}.nvd3 .nv-groups path.nv-line{fill:none}.nvd3 .nv-groups 
path.nv-area{stroke:none}.nvd3.nv-line .nvd3.nv-scatter .nv-groups .nv-point{fill-opacity:0;stroke-opacity:0}.nvd3.nv-scatter.nv-single-point .nv-groups .nv-point{fill-opacity:.5!important;stroke-opacity:.5!important}.nvd3 .nv-groups .nv-point.hover,.nvd3.nv-scatter .nv-groups .nv-point.hover{stroke-width:7px;fill-opacity:.95!important;stroke-opacity:.95!important}.nvd3 .nv-point-paths path{stroke:#aaa;stroke-opacity:0;fill:#eee;fill-opacity:0}.nvd3 .nv-indexLine{cursor:ew-resize}svg.nvd3-svg{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;display:block;width:100%;height:100%}.nvtooltip.with-3d-shadow,.with-3d-shadow .nvtooltip{box-shadow:0 5px 10px rgba(0,0,0,.2);border-radius:5px}.nvd3 text{font:400 12px Arial,sans-serif}.nvd3 .title{font:700 14px Arial,sans-serif}.nvd3 .nv-background{fill:#fff;fill-opacity:0}.nvd3.nv-noData{font-size:18px;font-weight:700}.nv-brush .extent{fill-opacity:.125}.nv-brush .resize path{fill:#eee;stroke:#666}.nvd3 .nv-legend .nv-series{cursor:pointer}.nvd3 .nv-legend .nv-disabled circle{fill-opacity:0}.nvd3 .nv-brush .extent{fill-opacity:0!important}.nvd3 .nv-brushBackground rect{stroke:#000;stroke-width:.4;fill:#fff;fill-opacity:.7}@media print{.nvd3 text{stroke-width:0;fill-opacity:1}}.nvd3.nv-ohlcBar .nv-ticks .nv-tick{stroke-width:1px}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.hover{stroke-width:2px}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.positive{stroke:#2ca02c}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.negative{stroke:#d62728}.nvd3 .background path{fill:none;stroke:#EEE;stroke-opacity:.4}.nvd3 .foreground path{fill:none;stroke-opacity:.7}.nvd3 .nv-parallelCoordinates-brush .extent{fill:#fff;fill-opacity:.6;stroke:gray;shape-rendering:crispEdges}.nvd3 .nv-parallelCoordinates .hover{fill-opacity:1;stroke-width:3px}.nvd3 .missingValuesline line{fill:none;stroke:#000;stroke-width:1;stroke-opacity:1;stroke-dasharray:5,5}.nvd3.nv-pie .nv-pie-title{font-size:24px;fill:rgba(19,196,249,.59)}.nvd3.nv-pie .nv-slice text{stroke:#000;stroke-width:0}.nvd3.nv-pie path{transition:fill-opacity 250ms linear,stroke-width 250ms linear,stroke-opacity 250ms linear;stroke:#fff;stroke-width:1px;stroke-opacity:1;fill-opacity:.7}.nvd3.nv-pie .hover path{fill-opacity:1}.nvd3.nv-pie .nv-label rect{fill-opacity:0;stroke-opacity:0}.nvd3 .nv-groups .nv-point.hover{stroke-width:20px;stroke-opacity:.5}.nvd3 .nv-scatter .nv-point.hover{fill-opacity:1}.nvd3.nv-sparkline path{fill:none}.nvd3.nv-sparklineplus .nv-hoverValue line{stroke:#333;stroke-width:1.5px}.nvd3.nv-sparklineplus,.nvd3.nv-sparklineplus g{pointer-events:all}.nvd3 .nv-interactiveGuideLine,.nvtooltip{pointer-events:none}.nvd3 .nv-hoverArea{fill-opacity:0;stroke-opacity:0}.nvd3.nv-sparklineplus .nv-xValue,.nvd3.nv-sparklineplus .nv-yValue{stroke-width:0;font-size:.9em;font-weight:400}.nvd3.nv-sparklineplus .nv-yValue{stroke:#f66}.nvd3.nv-sparklineplus .nv-maxValue{stroke:#2ca02c;fill:#2ca02c}.nvd3.nv-sparklineplus .nv-minValue{stroke:#d62728;fill:#d62728}.nvd3.nv-sparklineplus .nv-currentValue{font-weight:700;font-size:1.1em}.nvtooltip h3,.nvtooltip table td.key{font-weight:400}.nvd3.nv-stackedarea path.nv-area{fill-opacity:.7;stroke-opacity:0;transition:fill-opacity 250ms linear,stroke-opacity 250ms linear}.nvd3.nv-stackedarea path.nv-area.hover{fill-opacity:.9}.nvd3.nv-stackedarea .nv-groups 
.nv-point{stroke-opacity:0;fill-opacity:0}.nvtooltip{position:absolute;color:rgba(0,0,0,1);padding:1px;z-index:10000;display:block;font-family:Arial,sans-serif;font-size:13px;text-align:left;white-space:nowrap;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background:rgba(255,255,255,.8);border:1px solid rgba(0,0,0,.5);border-radius:4px}.nvtooltip h3,.nvtooltip p{margin:0;text-align:center}.nvtooltip.with-transitions,.with-transitions .nvtooltip{transition:opacity 50ms linear;transition-delay:200ms}.nvtooltip.x-nvtooltip,.nvtooltip.y-nvtooltip{padding:8px}.nvtooltip h3{padding:4px 14px;line-height:18px;background-color:rgba(247,247,247,.75);color:rgba(0,0,0,1);border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.nvtooltip p{padding:5px 14px}.nvtooltip span{display:inline-block;margin:2px 0}.nvtooltip table{margin:6px;border-spacing:0}.nvtooltip table td{padding:2px 9px 2px 0;vertical-align:middle}.nvtooltip table td.key.total{font-weight:700}.nvtooltip table td.value{text-align:right;font-weight:700}.nvtooltip table td.percent{color:#a9a9a9}.nvtooltip table tr.highlight td{padding:1px 9px 1px 0;border-bottom-style:solid;border-bottom-width:1px;border-top-style:solid;border-top-width:1px}.nvtooltip table td.legend-color-guide div{vertical-align:middle;width:12px;height:12px;border:1px solid #999}.nvtooltip .footer{padding:3px;text-align:center}.nvtooltip-pending-removal{pointer-events:none;display:none}.nvd3 line.nv-guideline{stroke:#ccc}
+/*# sourceMappingURL=nv.d3.min.css.map */
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map
new file mode 100644
index 0000000..63380e6
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["build/nv.d3.css"],"names":[],"mappings":"AAqBA,oBAfA,oBAgBI,KAAM,KAiWN,gBAAiB,WA/ErB,kBA+DA,uBAlVA,oBAfA,oBAiXI,gBAAiB,WAqErB,UAAW,UAJX,mBAvbA,eAoaA,uBAgCA,uCACI,eAAgB,KArcpB,eAEI,QAAS,EAuCb,2BAsJA,0DACI,QAAS,EA3Lb,oBAEI,OAAQ,KACR,eAAgB,IAIpB,2BACI,eAAgB,IAGpB,gCACI,eAAgB,EAGpB,oBAEI,OAAQ,QAIZ,0BACI,0BACA,eAAgB,IAGpB,mCACI,YAAa,IAGjB,sCACA,uCACA,uCACI,YAAa,OAOjB,oBACI,aAAc,IAEd,WAAY,aAAa,MAAM,OAGnC,0BACI,aAAc,EAGlB,2BACI,KAAM,QAGV,oBACI,KAAM,YAGV,2BACI,KAAM,cAKV,sCAFA,mCACA,6CAEI,eAAgB,EAEhB,WAAY,aAAa,MAAM,OA8EnC,wDAwEA,6CACI,WAAY,aAAa,MAAM,OAAQ,eAAe,MAAM,OAlJhE,8CACA,4CAHA,yCACA,mDAGI,aAAc,EAGlB,sCACA,6CACI,YAAa,IACb,KAAM,cACN,OAAQ,YAIZ,yBACE,aAAc,GAGhB,+BAIA,6BAHE,aAAc,EAOhB,6BACE,OAAQ,KAGV,uBACE,aAAc,MAGhB,gBAAkB,KAAM,KAAK,WAC7B,4BAA8B,aAAc,GAC5C,kCAAoC,aAAc,EAClD,2BAA6B,OAAQ,KAAM,aAAc,IACzD,mCAAqC,OAAQ,KAAM,KAAM,KAAM,aAAc,MAC7E,+BAAiC,OAAQ,KAAM,aAAc,MAC7D,8BAAgC,OAAQ,KAAM,aAAc,KAC5D,gCAAkC,KAAM,KACxC,gCAAkC,KAAM,KACxC,gCAAkC,KAAM,KACxC,0BAA4B,UAAW,KAAM,YAAa,IAC1D,6BAA+B,KAAM,KAErC,0BACI,KAAM,QACN,aAAc,GAGlB,gCACI,aAAc,GAGlB,2CACI,aAAc,IAGlB,iDACI,aAAc,IAGlB,yDACI,OAAQ,QACR,KAAM,QAGV,yDACI,OAAQ,QACR,KAAM,QAOV,uCACI,OAAQ,KAGZ,eACI,OAAQ,KACR,aAAc,MAGlB,eACI,OAAQ,KACR,eAAgB,GAGpB,oBACI,aAAc,EAOlB,4BACI,aAAa,EACb,aAAa,EAGjB,8BACI,aAAa,EACb,aAAa,EAGjB,qDACI,aAAa,EACb,eAAe,EAQnB,kCACI,aAAc,IAGlB,wCACI,aAAc,EAElB,8BACI,KAAM,KAGV,8BACI,OAAQ,KAGZ,oDACI,aAAc,EACd,eAAgB,EAGpB,sDACI,aAAc,aACd,eAAgB,aASpB,iCADA,4CAEI,aAAc,IACd,aAAc,cACd,eAAgB,cAIpB,2BACI,OAAQ,KACR,eAAgB,EAChB,KAAM,KACN,aAAc,EAIlB,oBACI,OAAQ,UAUZ,aACI,oBAAqB,KAClB,iBAAkB,KACjB,gBAAiB,KACb,YAAa,KACrB,QAAS,MACT,MAAM,KACN,OAAO,KAMX,0BAA2B,2BACvB,WAAY,EAAE,IAAI,KAAK,eACvB,cAAe,IAInB,WACI,KAAM,IAAO,KAAK,MAAO,WAG7B,aACI,KAAM,IAAK,KAAK,MAAO,WAG3B,qBACI,KAAM,KACN,aAAc,EAGlB,gBACI,UAAW,KACX,YAAa,IAQjB,kBACI,aAAc,KAIlB,uBACI,KAAM,KACN,OAAQ,KAQZ,4BACI,OAAQ,QAGZ,qCACI,aAAc,EAIlB,wBACI,aAAc,YAGlB,+BACI,OAAQ,KACR,aAAc,GACd,KAAM,KACN,aAAc,GAOlB,aACI,WACI,aAAc,EACd,aAAc,GAItB,oCACI,aAAc,IAGlB,0CACI,aAAc,IAGlB,6CACI,OAAQ,QAGZ,6CACI,OAAQ,QAIZ,uBACI,KAAM,KACN,OAAQ,KACR,eAAgB,GAIpB,uBACI,KAAM,KACN,eAAgB,GAGpB,4CACI,KAAM,KACN,aAAc,GACd,OAAQ,KACR,gBAAiB,WAGrB,qCACI,aAAc,EACjB,aAAc,IAIf,8BACE,KAAM,KACN,OAAQ,KACR,aAAc,EACd,eAAgB,EAChB,iBAAkB,EAAG,EAQvB,2BACI,UAAW,KACX,KAAM,qBAGV,4BACI,OAAQ,KACR,aAAc,EAGlB,kBAbI,WAAY,aAAa,MAAM,OAAQ,aAAa,MAAM,OAAQ,eAAe,MAAM,OAcvF,OAAQ,KACR,aAAc,IACd,eAAgB,EAIhB,aAAc,GAGlB,yBACI,aAAc,EAOlB,4BACI,aAAc,EACd,eAAgB,EAIpB,iCACI,aAAc,KACd,eAAgB,GAGpB,kCACI,aAAc,EAYlB,wBACI,KAAM,KAOV,2CACI,OAAQ,KACR,aAAc,MAGlB,uBACA,yBACI,eAAgB,IAsLpB,+BApIA,WAqII,eAAe,KApLnB,oBACI,aAAc,EACd,eAAgB,EAGpB,kCACA,kCACI,aAAc,EACd,UAAW,KACX,YAAa,IAGjB,kCACI,OAAQ,KAGZ,oCACI,OAAQ,QACR,KAAM,QAGV,oCACI,OAAQ,QACR,KAAM,QAGV,wCACI,YAAa,IACb,UAAW,MAgEf,cAoCA,wBACI,YAAa,IAlGjB,kCACI,aAAc,GACd,eAAgB,EAChB,WAAY,aAAa,MAAM,OAAQ,eAAe,MAAM,OAGhE,wCACI,aAAc,GAIlB,0CACI,eAAgB,EAChB,aAAc,EAGlB,WACI,SAAU,SAEV,MAAO,cACP,QAAS,IAET,QAAS,MACT,QAAS,MAET,YAAa,MAAO,WACpB,UAAW,KACX,WAAY,KAGZ,YAAa,OAEb,oBAAqB,KAElB,iBAAkB,KAEjB,gBAAiB,KAEb,YAAa,KAIrB,WAAY,qBACZ,OAAQ,IAAI,MAAM,eAClB,cAAe,IAiBnB,cAcA,aACI,OAAQ,EAER,WAAY,OA5BhB,4BAA6B,6BACzB,WAAY,QAAQ,KAAK,OAEzB,iBAAkB,MAGtB,uBACA,uBACI,QAAS,IAGb,cAEI,QAAS,IAAI,KACb,YAAa,KAEb,iBAAkB,sBAClB,MAAO,cAGP,cAAe,IAAI,MAAM,QAEzB,cAAe,IAAI,IAAI,EAAE,EAG7B,aAEI,QAAS,IAAI,KAIjB,gBACI,QAAS,aACT,OAAQ,IAAI,EAGhB,iBACI,OAAQ,IACR,eAAe,EAInB,oBACI,QAAS,IAAI,IAAI,IAAI,EACrB,eAAgB,OAOpB,8BACI,YAAa,IAGjB,0BACI,WAAY,MACZ,YAAa,IAGjB,4BACI,MAAO,QAGX,iCACI,QAAS,IAAI,IAAI,IAAI,EACrB,oBAAqB,MACrB,oBAAqB,IACrB,iBAAkB,MAClB,iBAAkB,IAGtB,2CA
GI,eAAgB,OAIhB,MAAO,KACP,OAAQ,KACR,OAAQ,IAAI,MAAM,KAGtB,mBACI,QAAS,IACT,WAAY,OAGhB,2BACI,eAAgB,KAChB,QAAS,KAWb,wBACI,OAAQ"}
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js
new file mode 100644
index 0000000..9cfd702
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js
@@ -0,0 +1,11 @@
+/* nvd3 version 1.8.5 (https://github.com/novus/nvd3) 2016-12-01 */
+
+!function(){var a={};a.dev=!1,a.tooltip=a.tooltip||{},a.utils=a.utils||{},a.models=a.models||{},a.charts={},a.logs={},a.dom={},"undefined"!=typeof module&&"undefined"!=typeof exports&&"undefined"==typeof d3&&(d3=require("d3")),a.dispatch=d3.dispatch("render_start","render_end"),Function.prototype.bind||(Function.prototype.bind=function(a){if("function"!=typeof this)throw new TypeError("Function.prototype.bind - what is trying to be bound is not callable");var b=Array.prototype.slice.call(arguments,1),c=this,d=function(){},e=function(){return c.apply(this instanceof d&&a?this:a,b.concat(Array.prototype.slice.call(arguments)))};return d.prototype=this.prototype,e.prototype=new d,e}),a.dev&&(a.dispatch.on("render_start",function(b){a.logs.startTime=+new Date}),a.dispatch.on("render_end",function(b){a.logs.endTime=+new Date,a.logs.totalTime=a.logs.endTime-a.logs.startTime,a.log("total",a.logs.totalTime)})),a.log=function(){if(a.dev&&window.console&&console.log&&console.log.apply)console.log.apply(console,arguments);else if(a.dev&&window.console&&"function"==typeof console.log&&Function.prototype.bind){var b=Function.prototype.bind.call(console.log,console);b.apply(console,arguments)}return arguments[arguments.length-1]},a.deprecated=function(a,b){console&&console.warn&&console.warn("nvd3 warning: `"+a+"` has been deprecated. ",b||"")},a.render=function(b){b=b||1,a.render.active=!0,a.dispatch.render_start();var c=function(){for(var d,e,f=0;b>f&&(e=a.render.queue[f]);f++)d=e.generate(),typeof e.callback==typeof Function&&e.callback(d);a.render.queue.splice(0,f),a.render.queue.length?setTimeout(c):(a.dispatch.render_end(),a.render.active=!1)};setTimeout(c)},a.render.active=!1,a.render.queue=[],a.addGraph=function(b){typeof arguments[0]==typeof Function&&(b={generate:arguments[0],callback:arguments[1]}),a.render.queue.push(b),a.render.active||a.render()},"undefined"!=typeof module&&"undefined"!=typeof exports&&(module.exports=a),"undefined"!=typeof window&&(window.nv=a),a.dom.write=function(a){return void 0!==window.fastdom?fastdom.mutate(a):a()},a.dom.read=function(a){return void 0!==window.fastdom?fastdom.measure(a):a()},a.interactiveGuideline=function(){"use strict";function b(l){l.each(function(l){function m(){var a=d3.mouse(this),d=a[0],e=a[1],h=!0,i=!1;if(k&&(d=d3.event.offsetX,e=d3.event.offsetY,"svg"!==d3.event.target.tagName&&(h=!1),d3.event.target.className.baseVal.match("nv-legend")&&(i=!0)),h&&(d-=c.left,e-=c.top),"mouseout"===d3.event.type||0>d||0>e||d>o||e>p||d3.event.relatedTarget&&void 0===d3.event.relatedTarget.ownerSVGElement||i){if(k&&d3.event.relatedTarget&&void 0===d3.event.relatedTarget.ownerSVGElement&&(void 0===d3.event.relatedTarget.className||d3.event.relatedTarget.className.match(j.nvPointerEventsClass)))return;return g.elementMouseout({mouseX:d,mouseY:e}),b.renderGuideLine(null),void j.hidden(!0)}j.hidden(!1);var l="function"==typeof f.rangeBands,m=void 0;if(l){var n=d3.bisect(f.range(),d)-1;if(!(f.range()[n]+f.rangeBand()>=d))return g.elementMouseout({mouseX:d,mouseY:e}),b.renderGuideLine(null),void j.hidden(!0);m=f.domain()[d3.bisect(f.range(),d)-1]}else m=f.invert(d);g.elementMousemove({mouseX:d,mouseY:e,pointXValue:m}),"dblclick"===d3.event.type&&g.elementDblclick({mouseX:d,mouseY:e,pointXValue:m}),"click"===d3.event.type&&g.elementClick({mouseX:d,mouseY:e,pointXValue:m}),"mousedown"===d3.event.type&&g.elementMouseDown({mouseX:d,mouseY:e,pointXValue:m}),"mouseup"===d3.event.type&&g.elementMouseUp({mouseX:d,mouseY:e,pointXValue:m})}var 
n=d3.select(this),o=d||960,p=e||400,q=n.selectAll("g.nv-wrap.nv-interactiveLineLayer").data([l]),r=q.enter().append("g").attr("class"," nv-wrap nv-interactiveLineLayer");r.append("g").attr("class","nv-interactiveGuideLine"),i&&(i.on("touchmove",m).on("mousemove",m,!0).on("mouseout",m,!0).on("mousedown",m,!0).on("mouseup",m,!0).on("dblclick",m).on("click",m),b.guideLine=null,b.renderGuideLine=function(c){h&&(b.guideLine&&b.guideLine.attr("x1")===c||a.dom.write(function(){var b=q.select(".nv-interactiveGuideLine").selectAll("line").data(null!=c?[a.utils.NaNtoZero(c)]:[],String);b.enter().append("line").attr("class","nv-guideline").attr("x1",function(a){return a}).attr("x2",function(a){return a}).attr("y1",p).attr("y2",0),b.exit().remove()}))})})}var c={left:0,top:0},d=null,e=null,f=d3.scale.linear(),g=d3.dispatch("elementMousemove","elementMouseout","elementClick","elementDblclick","elementMouseDown","elementMouseUp"),h=!0,i=null,j=a.models.tooltip(),k=window.ActiveXObject;return j.duration(0).hideDelay(0).hidden(!1),b.dispatch=g,b.tooltip=j,b.margin=function(a){return arguments.length?(c.top="undefined"!=typeof a.top?a.top:c.top,c.left="undefined"!=typeof a.left?a.left:c.left,b):c},b.width=function(a){return arguments.length?(d=a,b):d},b.height=function(a){return arguments.length?(e=a,b):e},b.xScale=function(a){return arguments.length?(f=a,b):f},b.showGuideLine=function(a){return arguments.length?(h=a,b):h},b.svgContainer=function(a){return arguments.length?(i=a,b):i},b},a.interactiveBisect=function(a,b,c){"use strict";if(!(a instanceof Array))return null;var d;d="function"!=typeof c?function(a){return a.x}:c;var e=function(a,b){return d(a)-b},f=d3.bisector(e).left,g=d3.max([0,f(a,b)-1]),h=d(a[g]);if("undefined"==typeof h&&(h=g),h===b)return g;var i=d3.min([g+1,a.length-1]),j=d(a[i]);return"undefined"==typeof j&&(j=i),Math.abs(j-b)>=Math.abs(h-b)?g:i},a.nearestValueIndex=function(a,b,c){"use strict";var d=1/0,e=null;return a.forEach(function(a,f){var g=Math.abs(b-a);null!=a&&d>=g&&c>g&&(d=g,e=f)}),e},a.models.tooltip=function(){"use strict";function b(){if(!l||!l.node()){var a=[1];l=d3.select(document.body).select("#"+d).data(a),l.enter().append("div").attr("class","nvtooltip "+(i?i:"xy-tooltip")).attr("id",d).style("top",0).style("left",0).style("opacity",0).style("position","fixed").selectAll("div, table, td, tr").classed(q,!0).classed(q,!0),l.exit().remove()}}function c(){return n&&w(e)?(a.dom.write(function(){b();var a=u(e);a&&(l.node().innerHTML=a),y()}),c):void 0}var d="nvtooltip-"+Math.floor(1e5*Math.random()),e=null,f="w",g=25,h=0,i=null,j=!0,k=200,l=null,m={left:null,top:null},n=!0,o=100,p=!0,q="nv-pointer-events-none",r=function(a,b){return a},s=function(a){return a},t=function(a,b){return a},u=function(a){if(null===a)return"";var b=d3.select(document.createElement("table"));if(p){var c=b.selectAll("thead").data([a]).enter().append("thead");c.append("tr").append("td").attr("colspan",3).append("strong").classed("x-value",!0).html(s(a.value))}var d=b.selectAll("tbody").data([a]).enter().append("tbody"),e=d.selectAll("tr").data(function(a){return a.series}).enter().append("tr").classed("highlight",function(a){return a.highlight});e.append("td").classed("legend-color-guide",!0).append("div").style("background-color",function(a){return a.color}),e.append("td").classed("key",!0).classed("total",function(a){return!!a.total}).html(function(a,b){return t(a.key,b)}),e.append("td").classed("value",!0).html(function(a,b){return r(a.value,b)}),e.filter(function(a,b){return void 
0!==a.percent}).append("td").classed("percent",!0).html(function(a,b){return"("+d3.format("%")(a.percent)+")"}),e.selectAll("td").each(function(a){if(a.highlight){var b=d3.scale.linear().domain([0,1]).range(["#fff",a.color]),c=.6;d3.select(this).style("border-bottom-color",b(c)).style("border-top-color",b(c))}});var f=b.node().outerHTML;return void 0!==a.footer&&(f+="<div class='footer'>"+a.footer+"</div>"),f},v=function(){var a={left:null!==d3.event?d3.event.clientX:0,top:null!==d3.event?d3.event.clientY:0};if("none"!=getComputedStyle(document.body).transform){var b=document.body.getBoundingClientRect();a.left-=b.left,a.top-=b.top}return a},w=function(b){if(b&&b.series){if(a.utils.isArray(b.series))return!0;if(a.utils.isObject(b.series))return b.series=[b.series],!0}return!1},x=function(a){var b,c,d,e=l.node().offsetHeight,h=l.node().offsetWidth,i=document.documentElement.clientWidth,j=document.documentElement.clientHeight;switch(f){case"e":b=-h-g,c=-(e/2),a.left+b<0&&(b=g),(d=a.top+c)<0&&(c-=d),(d=a.top+c+e)>j&&(c-=d-j);break;case"w":b=g,c=-(e/2),a.left+b+h>i&&(b=-h-g),(d=a.top+c)<0&&(c-=d),(d=a.top+c+e)>j&&(c-=d-j);break;case"n":b=-(h/2)-5,c=g,a.top+c+e>j&&(c=-e-g),(d=a.left+b)<0&&(b-=d),(d=a.left+b+h)>i&&(b-=d-i);break;case"s":b=-(h/2),c=-e-g,a.top+c<0&&(c=g),(d=a.left+b)<0&&(b-=d),(d=a.left+b+h)>i&&(b-=d-i);break;case"center":b=-(h/2),c=-(e/2);break;default:b=0,c=0}return{left:b,top:c}},y=function(){a.dom.read(function(){var a=v(),b=x(a),c=a.left+b.left,d=a.top+b.top;if(j)l.interrupt().transition().delay(k).duration(0).style("opacity",0);else{var e="translate("+m.left+"px, "+m.top+"px)",f="translate("+Math.round(c)+"px, "+Math.round(d)+"px)",g=d3.interpolateString(e,f),h=l.style("opacity")<.1;l.interrupt().transition().duration(h?0:o).styleTween("transform",function(a){return g},"important").styleTween("-webkit-transform",function(a){return g}).style("-ms-transform",f).style("opacity",1)}m.left=c,m.top=d})};return c.nvPointerEventsClass=q,c.options=a.utils.optionsFunc.bind(c),c._options=Object.create({},{duration:{get:function(){return o},set:function(a){o=a}},gravity:{get:function(){return f},set:function(a){f=a}},distance:{get:function(){return g},set:function(a){g=a}},snapDistance:{get:function(){return h},set:function(a){h=a}},classes:{get:function(){return i},set:function(a){i=a}},enabled:{get:function(){return n},set:function(a){n=a}},hideDelay:{get:function(){return k},set:function(a){k=a}},contentGenerator:{get:function(){return u},set:function(a){u=a}},valueFormatter:{get:function(){return r},set:function(a){r=a}},headerFormatter:{get:function(){return s},set:function(a){s=a}},keyFormatter:{get:function(){return t},set:function(a){t=a}},headerEnabled:{get:function(){return p},set:function(a){p=a}},position:{get:function(){return v},set:function(a){v=a}},chartContainer:{get:function(){return document.body},set:function(b){a.deprecated("chartContainer","feature removed after 1.8.3")}},fixedTop:{get:function(){return null},set:function(b){a.deprecated("fixedTop","feature removed after 1.8.1")}},offset:{get:function(){return{left:0,top:0}},set:function(b){a.deprecated("offset","use chart.tooltip.distance() instead")}},hidden:{get:function(){return j},set:function(a){j!=a&&(j=!!a,c())}},data:{get:function(){return e},set:function(a){a.point&&(a.value=a.point.x,a.series=a.series||{},a.series.value=a.point.y,a.series.color=a.point.color||a.series.color),e=a}},node:{get:function(){return l.node()},set:function(a){}},id:{get:function(){return 
d},set:function(a){}}}),a.utils.initOptions(c),c},a.utils.windowSize=function(){var a={width:640,height:480};return window.innerWidth&&window.innerHeight?(a.width=window.innerWidth,a.height=window.innerHeight,a):"CSS1Compat"==document.compatMode&&document.documentElement&&document.documentElement.offsetWidth?(a.width=document.documentElement.offsetWidth,a.height=document.documentElement.offsetHeight,a):document.body&&document.body.offsetWidth?(a.width=document.body.offsetWidth,a.height=document.body.offsetHeight,a):a},a.utils.isArray=Array.isArray,a.utils.isObject=function(a){return null!==a&&"object"==typeof a},a.utils.isFunction=function(a){return"function"==typeof a},a.utils.isDate=function(a){return"[object Date]"===toString.call(a)},a.utils.isNumber=function(a){return!isNaN(a)&&"number"==typeof a},a.utils.windowResize=function(b){return window.addEventListener?window.addEventListener("resize",b):a.log("ERROR: Failed to bind to window.resize with: ",b),{callback:b,clear:function(){window.removeEventListener("resize",b)}}},a.utils.getColor=function(b){if(void 0===b)return a.utils.defaultColor();if(a.utils.isArray(b)){var c=d3.scale.ordinal().range(b);return function(a,b){var d=void 0===b?a:b;return a.color||c(d)}}return b},a.utils.defaultColor=function(){return a.utils.getColor(d3.scale.category20().range())},a.utils.customTheme=function(b,c,d){c=c||function(a){return a.key},d=d||d3.scale.category20().range();var e=d.length;return function(f,g){var h=c(f);return a.utils.isFunction(b[h])?b[h]():void 0!==b[h]?b[h]:(e||(e=d.length),e-=1,d[e])}},a.utils.pjax=function(b,c){var d=function(d){d3.html(d,function(d){var e=d3.select(c).node();e.parentNode.replaceChild(d3.select(d).select(c).node(),e),a.utils.pjax(b,c)})};d3.selectAll(b).on("click",function(){history.pushState(this.href,this.textContent,this.href),d(this.href),d3.event.preventDefault()}),d3.select(window).on("popstate",function(){d3.event.state&&d(d3.event.state)})},a.utils.calcApproxTextWidth=function(b){if(a.utils.isFunction(b.style)&&a.utils.isFunction(b.text)){var c=parseInt(b.style("font-size").replace("px",""),10),d=b.text().length;return a.utils.NaNtoZero(d*c*.5)}return 0},a.utils.NaNtoZero=function(b){return!a.utils.isNumber(b)||isNaN(b)||null===b||b===1/0||b===-(1/0)?0:b},d3.selection.prototype.watchTransition=function(a){var b=[this].concat([].slice.call(arguments,1));return a.transition.apply(a,b)},a.utils.renderWatch=function(b,c){if(!(this instanceof a.utils.renderWatch))return new a.utils.renderWatch(b,c);var d=void 0!==c?c:250,e=[],f=this;this.models=function(a){return a=[].slice.call(arguments,0),a.forEach(function(a){a.__rendered=!1,function(a){a.dispatch.on("renderEnd",function(b){a.__rendered=!0,f.renderEnd("model")})}(a),e.indexOf(a)<0&&e.push(a)}),this},this.reset=function(a){void 0!==a&&(d=a),e=[]},this.transition=function(a,b,c){if(b=arguments.length>1?[].slice.call(arguments,1):[],c=b.length>1?b.pop():void 0!==d?d:250,a.__rendered=!1,e.indexOf(a)<0&&e.push(a),0===c)return a.__rendered=!0,a.delay=function(){return this},a.duration=function(){return this},a;0===a.length?a.__rendered=!0:a.every(function(a){return!a.length})?a.__rendered=!0:a.__rendered=!1;var g=0;return a.transition().duration(c).each(function(){++g}).each("end",function(c,d){0===--g&&(a.__rendered=!0,f.renderEnd.apply(this,b))})},this.renderEnd=function(){e.every(function(a){return a.__rendered})&&(e.forEach(function(a){a.__rendered=!1}),b.renderEnd.apply(this,arguments))}},a.utils.deepExtend=function(b){var 
c=arguments.length>1?[].slice.call(arguments,1):[];c.forEach(function(c){for(var d in c){var e=a.utils.isArray(b[d]),f=a.utils.isObject(b[d]),g=a.utils.isObject(c[d]);f&&!e&&g?a.utils.deepExtend(b[d],c[d]):b[d]=c[d]}})},a.utils.state=function(){if(!(this instanceof a.utils.state))return new a.utils.state;var b={},c=function(){},d=function(){return{}},e=null,f=null;this.dispatch=d3.dispatch("change","set"),this.dispatch.on("set",function(a){c(a,!0)}),this.getter=function(a){return d=a,this},this.setter=function(a,b){return b||(b=function(){}),c=function(c,d){a(c),d&&b()},this},this.init=function(b){e=e||{},a.utils.deepExtend(e,b)};var g=function(){var a=d();if(JSON.stringify(a)===JSON.stringify(b))return!1;for(var c in a)void 0===b[c]&&(b[c]={}),b[c]=a[c],f=!0;return!0};this.update=function(){e&&(c(e,!1),e=null),g.call(this)&&this.dispatch.change(b)}},a.utils.optionsFunc=function(b){return b&&d3.map(b).forEach(function(b,c){a.utils.isFunction(this[b])&&this[b](c)}.bind(this)),this},a.utils.calcTicksX=function(b,c){var d=1,e=0;for(e;e<c.length;e+=1){var f=c[e]&&c[e].values?c[e].values.length:0;d=f>d?f:d}return a.log("Requested number of ticks: ",b),a.log("Calculated max values to be: ",d),b=b>d?b=d-1:b,b=1>b?1:b,b=Math.floor(b),a.log("Calculating tick count as: ",b),b},a.utils.calcTicksY=function(b,c){return a.utils.calcTicksX(b,c)},a.utils.initOption=function(a,b){a._calls&&a._calls[b]?a[b]=a._calls[b]:(a[b]=function(c){return arguments.length?(a._overrides[b]=!0,a._options[b]=c,a):a._options[b]},a["_"+b]=function(c){return arguments.length?(a._overrides[b]||(a._options[b]=c),a):a._options[b]})},a.utils.initOptions=function(b){b._overrides=b._overrides||{};var c=Object.getOwnPropertyNames(b._options||{}),d=Object.getOwnPropertyNames(b._calls||{});c=c.concat(d);for(var e in c)a.utils.initOption(b,c[e])},a.utils.inheritOptionsD3=function(a,b,c){a._d3options=c.concat(a._d3options||[]),c.unshift(b),c.unshift(a),d3.rebind.apply(this,c)},a.utils.arrayUnique=function(a){return a.sort().filter(function(b,c){return!c||b!=a[c-1]})},a.utils.symbolMap=d3.map(),a.utils.symbol=function(){function b(b,e){var f=c.call(this,b,e),g=d.call(this,b,e);return-1!==d3.svg.symbolTypes.indexOf(f)?d3.svg.symbol().type(f).size(g)():a.utils.symbolMap.get(f)(g)}var c,d=64;return b.type=function(a){return arguments.length?(c=d3.functor(a),b):c},b.size=function(a){return arguments.length?(d=d3.functor(a),b):d},b},a.utils.inheritOptions=function(b,c){var d=Object.getOwnPropertyNames(c._options||{}),e=Object.getOwnPropertyNames(c._calls||{}),f=c._inherited||[],g=c._d3options||[],h=d.concat(e).concat(f).concat(g);h.unshift(c),h.unshift(b),d3.rebind.apply(this,h),b._inherited=a.utils.arrayUnique(d.concat(e).concat(f).concat(d).concat(b._inherited||[])),b._d3options=a.utils.arrayUnique(g.concat(b._d3options||[]))},a.utils.initSVG=function(a){a.classed({"nvd3-svg":!0})},a.utils.sanitizeHeight=function(a,b){return a||parseInt(b.style("height"),10)||400},a.utils.sanitizeWidth=function(a,b){return a||parseInt(b.style("width"),10)||960},a.utils.availableHeight=function(b,c,d){return Math.max(0,a.utils.sanitizeHeight(b,c)-d.top-d.bottom)},a.utils.availableWidth=function(b,c,d){return Math.max(0,a.utils.sanitizeWidth(b,c)-d.left-d.right)},a.utils.noData=function(b,c){var d=b.options(),e=d.margin(),f=d.noData(),g=null==f?["No Data Available."]:[f],h=a.utils.availableHeight(null,c,e),i=a.utils.availableWidth(null,c,e),j=e.left+i/2,k=e.top+h/2;c.selectAll("g").remove();var 
l=c.selectAll(".nv-noData").data(g);l.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),l.attr("x",j).attr("y",k).text(function(a){return a})},a.utils.wrapTicks=function(a,b){a.each(function(){for(var a,c=d3.select(this),d=c.text().split(/\s+/).reverse(),e=[],f=0,g=1.1,h=c.attr("y"),i=parseFloat(c.attr("dy")),j=c.text(null).append("tspan").attr("x",0).attr("y",h).attr("dy",i+"em");a=d.pop();)e.push(a),j.text(e.join(" ")),j.node().getComputedTextLength()>b&&(e.pop(),j.text(e.join(" ")),e=[a],j=c.append("tspan").attr("x",0).attr("y",h).attr("dy",++f*g+i+"em").text(a))})},a.utils.arrayEquals=function(b,c){if(b===c)return!0;if(!b||!c)return!1;if(b.length!=c.length)return!1;for(var d=0,e=b.length;e>d;d++)if(b[d]instanceof Array&&c[d]instanceof Array){if(!a.arrayEquals(b[d],c[d]))return!1}else if(b[d]!=c[d])return!1;return!0},a.models.axis=function(){"use strict";function b(g){return t.reset(),g.each(function(b){var g=d3.select(this);a.utils.initSVG(g);var q=g.selectAll("g.nv-wrap.nv-axis").data([b]),r=q.enter().append("g").attr("class","nvd3 nv-wrap nv-axis"),u=(r.append("g"),q.select("g"));null!==n?c.ticks(n):("top"==c.orient()||"bottom"==c.orient())&&c.ticks(Math.abs(d.range()[1]-d.range()[0])/100),u.watchTransition(t,"axis").call(c),s=s||c.scale();var v=c.tickFormat();null==v&&(v=s.tickFormat());var w=u.selectAll("text.nv-axislabel").data([h||null]);w.exit().remove(),void 0!==p&&u.selectAll("g").select("text").style("font-size",p);var x,y,z;switch(c.orient()){case"top":w.enter().append("text").attr("class","nv-axislabel"),z=0,1===d.range().length?z=m?2*d.range()[0]+d.rangeBand():0:2===d.range().length?z=m?d.range()[0]+d.range()[1]+d.rangeBand():d.range()[1]:d.range().length>2&&(z=d.range()[d.range().length-1]+(d.range()[1]-d.range()[0])),w.attr("text-anchor","middle").attr("y",0).attr("x",z/2),i&&(y=q.selectAll("g.nv-axisMaxMin").data(d.domain()),y.enter().append("g").attr("class",function(a,b){return["nv-axisMaxMin","nv-axisMaxMin-x",0==b?"nv-axisMin-x":"nv-axisMax-x"].join(" ")}).append("text"),y.exit().remove(),y.attr("transform",function(b,c){return"translate("+a.utils.NaNtoZero(d(b))+",0)"}).select("text").attr("dy","-0.5em").attr("y",-c.tickPadding()).attr("text-anchor","middle").text(function(a,b){var c=v(a);return(""+c).match("NaN")?"":c}),y.watchTransition(t,"min-max top").attr("transform",function(b,c){return"translate("+a.utils.NaNtoZero(d.range()[c])+",0)"}));break;case"bottom":x=o+36;var A=30,B=0,C=u.selectAll("g").select("text"),D="";if(j%360){C.attr("transform",""),C.each(function(a,b){var c=this.getBoundingClientRect(),d=c.width;B=c.height,d>A&&(A=d)}),D="rotate("+j+" 0,"+(B/2+c.tickPadding())+")";var E=Math.abs(Math.sin(j*Math.PI/180));x=(E?E*A:A)+30,C.attr("transform",D).style("text-anchor",j%360>0?"start":"end")}else l?C.attr("transform",function(a,b){return"translate(0,"+(b%2==0?"0":"12")+")"}):C.attr("transform","translate(0,0)");w.enter().append("text").attr("class","nv-axislabel"),z=0,1===d.range().length?z=m?2*d.range()[0]+d.rangeBand():0:2===d.range().length?z=m?d.range()[0]+d.range()[1]+d.rangeBand():d.range()[1]:d.range().length>2&&(z=d.range()[d.range().length-1]+(d.range()[1]-d.range()[0])),w.attr("text-anchor","middle").attr("y",x).attr("x",z/2),i&&(y=q.selectAll("g.nv-axisMaxMin").data([d.domain()[0],d.domain()[d.domain().length-1]]),y.enter().append("g").attr("class",function(a,b){return["nv-axisMaxMin","nv-axisMaxMin-x",0==b?"nv-axisMin-x":"nv-axisMax-x"].join(" 
")}).append("text"),y.exit().remove(),y.attr("transform",function(b,c){return"translate("+a.utils.NaNtoZero(d(b)+(m?d.rangeBand()/2:0))+",0)"}).select("text").attr("dy",".71em").attr("y",c.tickPadding()).attr("transform",D).style("text-anchor",j?j%360>0?"start":"end":"middle").text(function(a,b){var c=v(a);return(""+c).match("NaN")?"":c}),y.watchTransition(t,"min-max bottom").attr("transform",function(b,c){return"translate("+a.utils.NaNtoZero(d(b)+(m?d.rangeBand()/2:0))+",0)"}));break;case"right":w.enter().append("text").attr("class","nv-axislabel"),w.style("text-anchor",k?"middle":"begin").attr("transform",k?"rotate(90)":"").attr("y",k?-Math.max(e.right,f)+12-(o||0):-10).attr("x",k?d3.max(d.range())/2:c.tickPadding()),i&&(y=q.selectAll("g.nv-axisMaxMin").data(d.domain()),y.enter().append("g").attr("class",function(a,b){return["nv-axisMaxMin","nv-axisMaxMin-y",0==b?"nv-axisMin-y":"nv-axisMax-y"].join(" ")}).append("text").style("opacity",0),y.exit().remove(),y.attr("transform",function(b,c){return"translate(0,"+a.utils.NaNtoZero(d(b))+")"}).select("text").attr("dy",".32em").attr("y",0).attr("x",c.tickPadding()).style("text-anchor","start").text(function(a,b){var c=v(a);return(""+c).match("NaN")?"":c}),y.watchTransition(t,"min-max right").attr("transform",function(b,c){return"translate(0,"+a.utils.NaNtoZero(d.range()[c])+")"}).select("text").style("opacity",1));break;case"left":w.enter().append("text").attr("class","nv-axislabel"),w.style("text-anchor",k?"middle":"end").attr("transform",k?"rotate(-90)":"").attr("y",k?-Math.max(e.left,f)+25-(o||0):-10).attr("x",k?-d3.max(d.range())/2:-c.tickPadding()),i&&(y=q.selectAll("g.nv-axisMaxMin").data(d.domain()),y.enter().append("g").attr("class",function(a,b){return["nv-axisMaxMin","nv-axisMaxMin-y",0==b?"nv-axisMin-y":"nv-axisMax-y"].join(" ")}).append("text").style("opacity",0),y.exit().remove(),y.attr("transform",function(b,c){return"translate(0,"+a.utils.NaNtoZero(s(b))+")"}).select("text").attr("dy",".32em").attr("y",0).attr("x",-c.tickPadding()).attr("text-anchor","end").text(function(a,b){var c=v(a);return(""+c).match("NaN")?"":c}),y.watchTransition(t,"min-max right").attr("transform",function(b,c){return"translate(0,"+a.utils.NaNtoZero(d.range()[c])+")"}).select("text").style("opacity",1))}if(w.text(function(a){return a}),!i||"left"!==c.orient()&&"right"!==c.orient()||(u.selectAll("g").each(function(a,b){d3.select(this).select("text").attr("opacity",1),(d(a)<d.range()[1]+10||d(a)>d.range()[0]-10)&&((a>1e-10||-1e-10>a)&&d3.select(this).attr("opacity",0),d3.select(this).select("text").attr("opacity",0))}),d.domain()[0]==d.domain()[1]&&0==d.domain()[0]&&q.selectAll("g.nv-axisMaxMin").style("opacity",function(a,b){return b?0:1})),i&&("top"===c.orient()||"bottom"===c.orient())){var F=[];q.selectAll("g.nv-axisMaxMin").each(function(a,b){try{b?F.push(d(a)-this.getBoundingClientRect().width-4):F.push(d(a)+this.getBoundingClientRect().width+4)}catch(c){b?F.push(d(a)-4):F.push(d(a)+4)}}),u.selectAll("g").each(function(a,b){(d(a)<F[0]||d(a)>F[1])&&(a>1e-10||-1e-10>a?d3.select(this).remove():d3.select(this).select("text").remove())})}u.selectAll(".tick").filter(function(a){return!parseFloat(Math.round(1e5*a)/1e6)&&void 0!==a}).classed("zero",!0),s=d.copy()}),t.renderEnd("axis immediate"),b}var c=d3.svg.axis(),d=d3.scale.linear(),e={top:0,right:0,bottom:0,left:0},f=75,g=60,h=null,i=!0,j=0,k=!0,l=!1,m=!1,n=null,o=0,p=void 0,q=250,r=d3.dispatch("renderEnd");c.scale(d).orient("bottom").tickFormat(function(a){return a});var 
s,t=a.utils.renderWatch(r,q);return b.axis=c,b.dispatch=r,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{axisLabelDistance:{get:function(){return o},set:function(a){o=a}},staggerLabels:{get:function(){return l},set:function(a){l=a}},rotateLabels:{get:function(){return j},set:function(a){j=a}},rotateYLabel:{get:function(){return k},set:function(a){k=a}},showMaxMin:{get:function(){return i},set:function(a){i=a}},axisLabel:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return g},set:function(a){g=a}},ticks:{get:function(){return n},set:function(a){n=a}},width:{get:function(){return f},set:function(a){f=a}},fontSize:{get:function(){return p},set:function(a){p=a}},margin:{get:function(){return e},set:function(a){e.top=void 0!==a.top?a.top:e.top,e.right=void 0!==a.right?a.right:e.right,e.bottom=void 0!==a.bottom?a.bottom:e.bottom,e.left=void 0!==a.left?a.left:e.left}},duration:{get:function(){return q},set:function(a){q=a,t.reset(q)}},scale:{get:function(){return d},set:function(e){d=e,c.scale(d),m="function"==typeof d.rangeBands,a.utils.inheritOptionsD3(b,d,["domain","range","rangeBand","rangeBands"])}}}),a.utils.initOptions(b),a.utils.inheritOptionsD3(b,c,["orient","tickValues","tickSubdivide","tickSize","tickPadding","tickFormat"]),a.utils.inheritOptionsD3(b,d,["domain","range","rangeBand","rangeBands"]),b},a.models.boxPlot=function(){"use strict";function b(l){return E.reset(),l.each(function(b){var l=j-i.left-i.right,F=k-i.top-i.bottom;A=d3.select(this),a.utils.initSVG(A),m.domain(c||b.map(function(a,b){return o(a,b)})).rangeBands(d||[0,l],.1);var G=[];if(!e){var H,I,J=[];b.forEach(function(a,b){var c=p(a),d=r(a),e=s(a),f=t(a),g=v(a);g&&g.forEach(function(a,b){J.push(w(a,b,void 0))}),e&&J.push(e),c&&J.push(c),d&&J.push(d),f&&J.push(f)}),H=d3.min(J),I=d3.max(J),G=[H,I]}n.domain(e||G),n.range(f||[F,0]),g=g||m,h=h||n.copy().range([n(0),n(0)]);var K=A.selectAll("g.nv-wrap").data([b]);K.enter().append("g").attr("class","nvd3 nv-wrap");K.attr("transform","translate("+i.left+","+i.top+")");var L=K.selectAll(".nv-boxplot").data(function(a){return a}),M=L.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6);L.attr("class","nv-boxplot").attr("transform",function(a,b,c){return"translate("+(m(o(a,b))+.05*m.rangeBand())+", 0)"}).classed("hover",function(a){return a.hover}),L.watchTransition(E,"nv-boxplot: boxplots").style("stroke-opacity",1).style("fill-opacity",.75).delay(function(a,c){return c*C/b.length}).attr("transform",function(a,b){return"translate("+(m(o(a,b))+.05*m.rangeBand())+", 0)"}),L.exit().remove(),M.each(function(a,b){var c=d3.select(this);[s,t].forEach(function(d){if(void 0!==d(a)&&null!==d(a)){var e=d===s?"low":"high";c.append("line").style("stroke",u(a)||z(a,b)).attr("class","nv-boxplot-whisker nv-boxplot-"+e),c.append("line").style("stroke",u(a)||z(a,b)).attr("class","nv-boxplot-tick nv-boxplot-"+e)}})});var N=function(){return null===D?.9*m.rangeBand():Math.min(75,.9*m.rangeBand())},O=function(){return.45*m.rangeBand()-N()/2},P=function(){return.45*m.rangeBand()+N()/2};[s,t].forEach(function(a){var b=a===s?"low":"high",c=a===s?p:r;L.select("line.nv-boxplot-whisker.nv-boxplot-"+b).watchTransition(E,"nv-boxplot: boxplots").attr("x1",.45*m.rangeBand()).attr("y1",function(b,c){return n(a(b))}).attr("x2",.45*m.rangeBand()).attr("y2",function(a,b){return n(c(a))}),L.select("line.nv-boxplot-tick.nv-boxplot-"+b).watchTransition(E,"nv-boxplot: boxplots").attr("x1",O).attr("y1",function(b,c){return 
n(a(b))}).attr("x2",P).attr("y2",function(b,c){return n(a(b))})}),[s,t].forEach(function(a){var b=a===s?"low":"high";M.selectAll(".nv-boxplot-"+b).on("mouseover",function(b,c,d){d3.select(this).classed("hover",!0),B.elementMouseover({series:{key:a(b),color:u(b)||z(b,d)},e:d3.event})}).on("mouseout",function(b,c,d){d3.select(this).classed("hover",!1),B.elementMouseout({series:{key:a(b),color:u(b)||z(b,d)},e:d3.event})}).on("mousemove",function(a,b){B.elementMousemove({e:d3.event})})}),M.append("rect").attr("class","nv-boxplot-box").on("mouseover",function(a,b){d3.select(this).classed("hover",!0),B.elementMouseover({key:o(a),value:o(a),series:[{key:"Q3",value:r(a),color:u(a)||z(a,b)},{key:"Q2",value:q(a),color:u(a)||z(a,b)},{key:"Q1",value:p(a),color:u(a)||z(a,b)}],data:a,index:b,e:d3.event})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1),B.elementMouseout({key:o(a),value:o(a),series:[{key:"Q3",value:r(a),color:u(a)||z(a,b)},{key:"Q2",value:q(a),color:u(a)||z(a,b)},{key:"Q1",value:p(a),color:u(a)||z(a,b)}],data:a,index:b,e:d3.event})}).on("mousemove",function(a,b){B.elementMousemove({e:d3.event})}),L.select("rect.nv-boxplot-box").watchTransition(E,"nv-boxplot: boxes").attr("y",function(a,b){return n(r(a))}).attr("width",N).attr("x",O).attr("height",function(a,b){return Math.abs(n(r(a))-n(p(a)))||1}).style("fill",function(a,b){return u(a)||z(a,b)}).style("stroke",function(a,b){return u(a)||z(a,b)}),M.append("line").attr("class","nv-boxplot-median"),L.select("line.nv-boxplot-median").watchTransition(E,"nv-boxplot: boxplots line").attr("x1",O).attr("y1",function(a,b){return n(q(a))}).attr("x2",P).attr("y2",function(a,b){return n(q(a))});var Q=L.selectAll(".nv-boxplot-outlier").data(function(a){return v(a)||[]});Q.enter().append("circle").style("fill",function(a,b,c){return y(a,b,c)||z(a,c)}).style("stroke",function(a,b,c){return y(a,b,c)||z(a,c)}).style("z-index",9e3).on("mouseover",function(a,b,c){d3.select(this).classed("hover",!0),B.elementMouseover({series:{key:x(a,b,c),color:y(a,b,c)||z(a,c)},e:d3.event})}).on("mouseout",function(a,b,c){d3.select(this).classed("hover",!1),B.elementMouseout({series:{key:x(a,b,c),color:y(a,b,c)||z(a,c)},e:d3.event})}).on("mousemove",function(a,b){B.elementMousemove({e:d3.event})}),Q.attr("class","nv-boxplot-outlier"),Q.watchTransition(E,"nv-boxplot: nv-boxplot-outlier").attr("cx",.45*m.rangeBand()).attr("cy",function(a,b,c){return n(w(a,b,c))}).attr("r","3"),Q.exit().remove(),g=m.copy(),h=n.copy()}),E.renderEnd("nv-boxplot immediate"),b}var c,d,e,f,g,h,i={top:0,right:0,bottom:0,left:0},j=960,k=500,l=Math.floor(1e4*Math.random()),m=d3.scale.ordinal(),n=d3.scale.linear(),o=function(a){return a.label},p=function(a){return a.values.Q1},q=function(a){return a.values.Q2},r=function(a){return a.values.Q3},s=function(a){return a.values.whisker_low},t=function(a){return a.values.whisker_high},u=function(a){return a.color},v=function(a){return a.values.outliers},w=function(a,b,c){return a},x=function(a,b,c){return a},y=function(a,b,c){return void 0},z=a.utils.defaultColor(),A=null,B=d3.dispatch("elementMouseover","elementMouseout","elementMousemove","renderEnd"),C=250,D=null,E=a.utils.renderWatch(B,C);return b.dispatch=B,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return j},set:function(a){j=a}},height:{get:function(){return k},set:function(a){k=a}},maxBoxWidth:{get:function(){return D},set:function(a){D=a}},x:{get:function(){return o},set:function(a){o=a}},q1:{get:function(){return 
p},set:function(a){p=a}},q2:{get:function(){return q},set:function(a){q=a}},q3:{get:function(){return r},set:function(a){r=a}},wl:{get:function(){return s},set:function(a){s=a}},wh:{get:function(){return t},set:function(a){t=a}},itemColor:{get:function(){return u},set:function(a){u=a}},outliers:{get:function(){return v},set:function(a){
+v=a}},outlierValue:{get:function(){return w},set:function(a){w=a}},outlierLabel:{get:function(){return x},set:function(a){x=a}},outlierColor:{get:function(){return y},set:function(a){y=a}},xScale:{get:function(){return m},set:function(a){m=a}},yScale:{get:function(){return n},set:function(a){n=a}},xDomain:{get:function(){return c},set:function(a){c=a}},yDomain:{get:function(){return e},set:function(a){e=a}},xRange:{get:function(){return d},set:function(a){d=a}},yRange:{get:function(){return f},set:function(a){f=a}},id:{get:function(){return l},set:function(a){l=a}},y:{get:function(){return console.warn("BoxPlot 'y' chart option is deprecated. Please use model overrides instead."),{}},set:function(a){console.warn("BoxPlot 'y' chart option is deprecated. Please use model overrides instead.")}},margin:{get:function(){return i},set:function(a){i.top=void 0!==a.top?a.top:i.top,i.right=void 0!==a.right?a.right:i.right,i.bottom=void 0!==a.bottom?a.bottom:i.bottom,i.left=void 0!==a.left?a.left:i.left}},color:{get:function(){return z},set:function(b){z=a.utils.getColor(b)}},duration:{get:function(){return C},set:function(a){C=a,E.reset(C)}}}),a.utils.initOptions(b),b},a.models.boxPlotChart=function(){"use strict";function b(k){return t.reset(),t.models(e),l&&t.models(f),m&&t.models(g),k.each(function(k){var p=d3.select(this);a.utils.initSVG(p);var t=(i||parseInt(p.style("width"))||960)-h.left-h.right,u=(j||parseInt(p.style("height"))||400)-h.top-h.bottom;if(b.update=function(){r.beforeUpdate(),p.transition().duration(s).call(b)},b.container=this,!k||!k.length){var v=p.selectAll(".nv-noData").data([q]);return v.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),v.attr("x",h.left+t/2).attr("y",h.top+u/2).text(function(a){return a}),b}p.selectAll(".nv-noData").remove(),c=e.xScale(),d=e.yScale().clamp(!0);var w=p.selectAll("g.nv-wrap.nv-boxPlotWithAxes").data([k]),x=w.enter().append("g").attr("class","nvd3 nv-wrap nv-boxPlotWithAxes").append("g"),y=x.append("defs"),z=w.select("g");x.append("g").attr("class","nv-x nv-axis"),x.append("g").attr("class","nv-y nv-axis").append("g").attr("class","nv-zeroLine").append("line"),x.append("g").attr("class","nv-barsWrap"),z.attr("transform","translate("+h.left+","+h.top+")"),n&&z.select(".nv-y.nv-axis").attr("transform","translate("+t+",0)"),e.width(t).height(u);var A=z.select(".nv-barsWrap").datum(k.filter(function(a){return!a.disabled}));if(A.transition().call(e),y.append("clipPath").attr("id","nv-x-label-clip-"+e.id()).append("rect"),z.select("#nv-x-label-clip-"+e.id()+" rect").attr("width",c.rangeBand()*(o?2:1)).attr("height",16).attr("x",-c.rangeBand()/(o?1:2)),l){f.scale(c).ticks(a.utils.calcTicksX(t/100,k)).tickSize(-u,0),z.select(".nv-x.nv-axis").attr("transform","translate(0,"+d.range()[0]+")"),z.select(".nv-x.nv-axis").call(f);var B=z.select(".nv-x.nv-axis").selectAll("g");o&&B.selectAll("text").attr("transform",function(a,b,c){return"translate(0,"+(c%2===0?"5":"17")+")"})}m&&(g.scale(d).ticks(Math.floor(u/36)).tickSize(-t,0),z.select(".nv-y.nv-axis").call(g)),z.select(".nv-zeroLine line").attr("x1",0).attr("x2",t).attr("y1",d(0)).attr("y2",d(0))}),t.renderEnd("nv-boxplot chart immediate"),b}var c,d,e=a.models.boxPlot(),f=a.models.axis(),g=a.models.axis(),h={top:15,right:10,bottom:50,left:60},i=null,j=null,k=a.utils.getColor(),l=!0,m=!0,n=!1,o=!1,p=a.models.tooltip(),q="No Data Available.",r=d3.dispatch("beforeUpdate","renderEnd"),s=250;f.orient("bottom").showMaxMin(!1).tickFormat(function(a){return 
a}),g.orient(n?"right":"left").tickFormat(d3.format(",.1f")),p.duration(0);var t=a.utils.renderWatch(r,s);return e.dispatch.on("elementMouseover.tooltip",function(a){p.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){p.data(a).hidden(!0)}),e.dispatch.on("elementMousemove.tooltip",function(a){p()}),b.dispatch=r,b.boxplot=e,b.xAxis=f,b.yAxis=g,b.tooltip=p,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return i},set:function(a){i=a}},height:{get:function(){return j},set:function(a){j=a}},staggerLabels:{get:function(){return o},set:function(a){o=a}},showXAxis:{get:function(){return l},set:function(a){l=a}},showYAxis:{get:function(){return m},set:function(a){m=a}},tooltipContent:{get:function(){return p},set:function(a){p=a}},noData:{get:function(){return q},set:function(a){q=a}},margin:{get:function(){return h},set:function(a){h.top=void 0!==a.top?a.top:h.top,h.right=void 0!==a.right?a.right:h.right,h.bottom=void 0!==a.bottom?a.bottom:h.bottom,h.left=void 0!==a.left?a.left:h.left}},duration:{get:function(){return s},set:function(a){s=a,t.reset(s),e.duration(s),f.duration(s),g.duration(s)}},color:{get:function(){return k},set:function(b){k=a.utils.getColor(b),e.color(k)}},rightAlignYAxis:{get:function(){return n},set:function(a){n=a,g.orient(a?"right":"left")}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.bullet=function(){"use strict";function b(a,b){var c=a.slice();a.sort(function(a,d){var e=c.indexOf(a),f=c.indexOf(d);return d3.descending(b[e],b[f])})}function c(e){return e.each(function(c,e){var s=p-d.left-d.right,y=q-d.top-d.bottom;r=d3.select(this),a.utils.initSVG(r);var z=g.call(this,c,e).slice(),A=h.call(this,c,e).slice(),B=i.call(this,c,e).slice(),C=j.call(this,c,e).slice(),D=k.call(this,c,e).slice(),E=l.call(this,c,e).slice(),F=m.call(this,c,e).slice(),G=n.call(this,c,e).slice();b(D,z),b(E,A),b(F,B),b(G,C),z.sort(d3.descending),A.sort(d3.descending),B.sort(d3.descending),C.sort(d3.descending);var H=d3.scale.linear().domain(d3.extent(d3.merge([o,z]))).range(f?[s,0]:[0,s]);this.__chart__||d3.scale.linear().domain([0,1/0]).range(H.range());this.__chart__=H;for(var I=(d3.min(z),d3.max(z),z[1],r.selectAll("g.nv-wrap.nv-bullet").data([c])),J=I.enter().append("g").attr("class","nvd3 nv-wrap nv-bullet"),K=J.append("g"),L=I.select("g"),e=0,M=z.length;M>e;e++){var N="nv-range nv-range"+e;2>=e&&(N=N+" nv-range"+w[e]),K.append("rect").attr("class",N)}K.append("rect").attr("class","nv-measure"),I.attr("transform","translate("+d.left+","+d.top+")");for(var O=function(a){return Math.abs(H(a)-H(0))},P=function(a){return H(0>a?a:0)},e=0,M=z.length;M>e;e++){var Q=z[e];L.select("rect.nv-range"+e).datum(Q).attr("height",y).transition().duration(x).attr("width",O(Q)).attr("x",P(Q))}L.select("rect.nv-measure").style("fill",t).attr("height",y/3).attr("y",y/3).on("mouseover",function(){u.elementMouseover({value:C[0],label:G[0]||"Current",color:d3.select(this).style("fill")})}).on("mousemove",function(){u.elementMousemove({value:C[0],label:G[0]||"Current",color:d3.select(this).style("fill")})}).on("mouseout",function(){u.elementMouseout({value:C[0],label:G[0]||"Current",color:d3.select(this).style("fill")})}).transition().duration(x).attr("width",0>C?H(0)-H(C[0]):H(C[0])-H(0)).attr("x",P(C));var R=y/6,S=A.map(function(a,b){return{value:a,label:E[b]}});K.selectAll("path.nv-markerTriangle").data(S).enter().append("path").attr("class","nv-markerTriangle").attr("d","M0,"+R+"L"+R+","+-R+" 
"+-R+","+-R+"Z").on("mouseover",function(a){u.elementMouseover({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill"),pos:[H(a.value),y/2]})}).on("mousemove",function(a){u.elementMousemove({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){u.elementMouseout({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill")})}),L.selectAll("path.nv-markerTriangle").data(S).transition().duration(x).attr("transform",function(a){return"translate("+H(a.value)+","+y/2+")"});var T=B.map(function(a,b){return{value:a,label:F[b]}});K.selectAll("line.nv-markerLine").data(T).enter().append("line").attr("cursor","").attr("class","nv-markerLine").attr("x1",function(a){return H(a.value)}).attr("y1","2").attr("x2",function(a){return H(a.value)}).attr("y2",y-2).on("mouseover",function(a){u.elementMouseover({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill"),pos:[H(a.value),y/2]})}).on("mousemove",function(a){u.elementMousemove({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){u.elementMouseout({value:a.value,label:a.label||"Previous",color:d3.select(this).style("fill")})}),L.selectAll("line.nv-markerLine").data(T).transition().duration(x).attr("x1",function(a){return H(a.value)}).attr("x2",function(a){return H(a.value)}),I.selectAll(".nv-range").on("mouseover",function(a,b){var c=D[b]||v[b];u.elementMouseover({value:a,label:c,color:d3.select(this).style("fill")})}).on("mousemove",function(){u.elementMousemove({value:C[0],label:G[0]||"Previous",color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){var c=D[b]||v[b];u.elementMouseout({value:a,label:c,color:d3.select(this).style("fill")})})}),c}var d={top:0,right:0,bottom:0,left:0},e="left",f=!1,g=function(a){return a.ranges},h=function(a){return a.markers?a.markers:[]},i=function(a){return a.markerLines?a.markerLines:[0]},j=function(a){return a.measures},k=function(a){return a.rangeLabels?a.rangeLabels:[]},l=function(a){return a.markerLabels?a.markerLabels:[]},m=function(a){return a.markerLineLabels?a.markerLineLabels:[]},n=function(a){return a.measureLabels?a.measureLabels:[]},o=[0],p=380,q=30,r=null,s=null,t=a.utils.getColor(["#1f77b4"]),u=d3.dispatch("elementMouseover","elementMouseout","elementMousemove"),v=["Maximum","Mean","Minimum"],w=["Max","Avg","Min"],x=1e3;return c.dispatch=u,c.options=a.utils.optionsFunc.bind(c),c._options=Object.create({},{ranges:{get:function(){return g},set:function(a){g=a}},markers:{get:function(){return h},set:function(a){h=a}},measures:{get:function(){return j},set:function(a){j=a}},forceX:{get:function(){return o},set:function(a){o=a}},width:{get:function(){return p},set:function(a){p=a}},height:{get:function(){return q},set:function(a){q=a}},tickFormat:{get:function(){return s},set:function(a){s=a}},duration:{get:function(){return x},set:function(a){x=a}},margin:{get:function(){return d},set:function(a){d.top=void 0!==a.top?a.top:d.top,d.right=void 0!==a.right?a.right:d.right,d.bottom=void 0!==a.bottom?a.bottom:d.bottom,d.left=void 0!==a.left?a.left:d.left}},orient:{get:function(){return e},set:function(a){e=a,f="right"==e||"bottom"==e}},color:{get:function(){return t},set:function(b){t=a.utils.getColor(b)}}}),a.utils.initOptions(c),c},a.models.bulletChart=function(){"use strict";function b(d){return d.each(function(e,o){var p=d3.select(this);a.utils.initSVG(p);var 
q=a.utils.availableWidth(k,p,g),r=l-g.top-g.bottom;if(b.update=function(){b(d)},b.container=this,!e||!h.call(this,e,o))return a.utils.noData(b,p),b;p.selectAll(".nv-noData").remove();var s=h.call(this,e,o).slice().sort(d3.descending),t=i.call(this,e,o).slice().sort(d3.descending),u=j.call(this,e,o).slice().sort(d3.descending),v=p.selectAll("g.nv-wrap.nv-bulletChart").data([e]),w=v.enter().append("g").attr("class","nvd3 nv-wrap nv-bulletChart"),x=w.append("g"),y=v.select("g");x.append("g").attr("class","nv-bulletWrap"),x.append("g").attr("class","nv-titles"),v.attr("transform","translate("+g.left+","+g.top+")");var z=d3.scale.linear().domain([0,Math.max(s[0],t[0]||0,u[0])]).range(f?[q,0]:[0,q]),A=this.__chart__||d3.scale.linear().domain([0,1/0]).range(z.range());this.__chart__=z;var B=x.select(".nv-titles").append("g").attr("text-anchor","end").attr("transform","translate(-6,"+(l-g.top-g.bottom)/2+")");B.append("text").attr("class","nv-title").text(function(a){return a.title}),B.append("text").attr("class","nv-subtitle").attr("dy","1em").text(function(a){return a.subtitle}),c.width(q).height(r);var C=y.select(".nv-bulletWrap");d3.transition(C).call(c);var D=m||z.tickFormat(q/100),E=y.selectAll("g.nv-tick").data(z.ticks(n?n:q/50),function(a){return this.textContent||D(a)}),F=E.enter().append("g").attr("class","nv-tick").attr("transform",function(a){return"translate("+A(a)+",0)"}).style("opacity",1e-6);F.append("line").attr("y1",r).attr("y2",7*r/6),F.append("text").attr("text-anchor","middle").attr("dy","1em").attr("y",7*r/6).text(D);var G=d3.transition(E).transition().duration(c.duration()).attr("transform",function(a){return"translate("+z(a)+",0)"}).style("opacity",1);G.select("line").attr("y1",r).attr("y2",7*r/6),G.select("text").attr("y",7*r/6),d3.transition(E.exit()).transition().duration(c.duration()).attr("transform",function(a){return"translate("+z(a)+",0)"}).style("opacity",1e-6).remove()}),d3.timer.flush(),b}var c=a.models.bullet(),d=a.models.tooltip(),e="left",f=!1,g={top:5,right:40,bottom:20,left:120},h=function(a){return a.ranges},i=function(a){return a.markers?a.markers:[]},j=function(a){return a.measures},k=null,l=55,m=null,n=null,o=null,p=d3.dispatch();return d.duration(0).headerEnabled(!1),c.dispatch.on("elementMouseover.tooltip",function(a){a.series={key:a.label,value:a.value,color:a.color},d.data(a).hidden(!1)}),c.dispatch.on("elementMouseout.tooltip",function(a){d.hidden(!0)}),c.dispatch.on("elementMousemove.tooltip",function(a){d()}),b.bullet=c,b.dispatch=p,b.tooltip=d,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{ranges:{get:function(){return h},set:function(a){h=a}},markers:{get:function(){return i},set:function(a){i=a}},measures:{get:function(){return j},set:function(a){j=a}},width:{get:function(){return k},set:function(a){k=a}},height:{get:function(){return l},set:function(a){l=a}},tickFormat:{get:function(){return m},set:function(a){m=a}},ticks:{get:function(){return n},set:function(a){n=a}},noData:{get:function(){return o},set:function(a){o=a}},margin:{get:function(){return g},set:function(a){g.top=void 0!==a.top?a.top:g.top,g.right=void 0!==a.right?a.right:g.right,g.bottom=void 0!==a.bottom?a.bottom:g.bottom,g.left=void 0!==a.left?a.left:g.left}},orient:{get:function(){return e},set:function(a){e=a,f="right"==e||"bottom"==e}}}),a.utils.inheritOptions(b,c),a.utils.initOptions(b),b},a.models.candlestickBar=function(){"use strict";function b(x){return x.each(function(b){c=d3.select(this);var 
x=a.utils.availableWidth(i,c,h),y=a.utils.availableHeight(j,c,h);a.utils.initSVG(c);var A=x/b[0].values.length*.45;l.domain(d||d3.extent(b[0].values.map(n).concat(t))),v?l.range(f||[.5*x/b[0].values.length,x*(b[0].values.length-.5)/b[0].values.length]):l.range(f||[5+A/2,x-A/2-5]),m.domain(e||[d3.min(b[0].values.map(s).concat(u)),d3.max(b[0].values.map(r).concat(u))]).range(g||[y,0]),l.domain()[0]===l.domain()[1]&&(l.domain()[0]?l.domain([l.domain()[0]-.01*l.domain()[0],l.domain()[1]+.01*l.domain()[1]]):l.domain([-1,1])),m.domain()[0]===m.domain()[1]&&(m.domain()[0]?m.domain([m.domain()[0]+.01*m.domain()[0],m.domain()[1]-.01*m.domain()[1]]):m.domain([-1,1]));var B=d3.select(this).selectAll("g.nv-wrap.nv-candlestickBar").data([b[0].values]),C=B.enter().append("g").attr("class","nvd3 nv-wrap nv-candlestickBar"),D=C.append("defs"),E=C.append("g"),F=B.select("g");E.append("g").attr("class","nv-ticks"),B.attr("transform","translate("+h.left+","+h.top+")"),c.on("click",function(a,b){z.chartClick({data:a,index:b,pos:d3.event,id:k})}),D.append("clipPath").attr("id","nv-chart-clip-path-"+k).append("rect"),B.select("#nv-chart-clip-path-"+k+" rect").attr("width",x).attr("height",y),F.attr("clip-path",w?"url(#nv-chart-clip-path-"+k+")":"");var G=B.select(".nv-ticks").selectAll(".nv-tick").data(function(a){return a});G.exit().remove();var H=G.enter().append("g");G.attr("class",function(a,b,c){return(p(a,b)>q(a,b)?"nv-tick negative":"nv-tick positive")+" nv-tick-"+c+"-"+b});H.append("line").attr("class","nv-candlestick-lines").attr("transform",function(a,b){return"translate("+l(n(a,b))+",0)"}).attr("x1",0).attr("y1",function(a,b){return m(r(a,b))}).attr("x2",0).attr("y2",function(a,b){return m(s(a,b))}),H.append("rect").attr("class","nv-candlestick-rects nv-bars").attr("transform",function(a,b){return"translate("+(l(n(a,b))-A/2)+","+(m(o(a,b))-(p(a,b)>q(a,b)?m(q(a,b))-m(p(a,b)):0))+")"}).attr("x",0).attr("y",0).attr("width",A).attr("height",function(a,b){var c=p(a,b),d=q(a,b);return c>d?m(d)-m(c):m(c)-m(d)});G.select(".nv-candlestick-lines").transition().attr("transform",function(a,b){return"translate("+l(n(a,b))+",0)"}).attr("x1",0).attr("y1",function(a,b){return m(r(a,b))}).attr("x2",0).attr("y2",function(a,b){return m(s(a,b))}),G.select(".nv-candlestick-rects").transition().attr("transform",function(a,b){return"translate("+(l(n(a,b))-A/2)+","+(m(o(a,b))-(p(a,b)>q(a,b)?m(q(a,b))-m(p(a,b)):0))+")"}).attr("x",0).attr("y",0).attr("width",A).attr("height",function(a,b){var c=p(a,b),d=q(a,b);return c>d?m(d)-m(c):m(c)-m(d)})}),b}var c,d,e,f,g,h={top:0,right:0,bottom:0,left:0},i=null,j=null,k=Math.floor(1e4*Math.random()),l=d3.scale.linear(),m=d3.scale.linear(),n=function(a){return a.x},o=function(a){return a.y},p=function(a){return a.open},q=function(a){return a.close},r=function(a){return a.high},s=function(a){return a.low},t=[],u=[],v=!1,w=!0,x=a.utils.defaultColor(),y=!1,z=d3.dispatch("stateChange","changeState","renderEnd","chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove");return b.highlightPoint=function(a,d){b.clearHighlights(),c.select(".nv-candlestickBar .nv-tick-0-"+a).classed("hover",d)},b.clearHighlights=function(){c.select(".nv-candlestickBar .nv-tick.hover").classed("hover",!1)},b.dispatch=z,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return i},set:function(a){i=a}},height:{get:function(){return j},set:function(a){j=a}},xScale:{get:function(){return 
l},set:function(a){l=a}},yScale:{get:function(){return m},set:function(a){m=a}},xDomain:{get:function(){return d},set:function(a){d=a}},yDomain:{get:function(){return e},set:function(a){e=a}},xRange:{get:function(){return f},set:function(a){f=a}},yRange:{get:function(){return g},set:function(a){g=a}},forceX:{get:function(){return t},set:function(a){t=a}},forceY:{get:function(){return u},set:function(a){u=a}},padData:{get:function(){return v},set:function(a){v=a}},clipEdge:{get:function(){return w},set:function(a){w=a}},id:{get:function(){return k},set:function(a){k=a}},interactive:{get:function(){return y},set:function(a){y=a}},x:{get:function(){return n},set:function(a){n=a}},y:{get:function(){return o},set:function(a){o=a}},open:{get:function(){return p()},set:function(a){p=a}},close:{get:function(){return q()},set:function(a){q=a}},high:{get:function(){return r},set:function(a){r=a}},low:{get:function(){return s},set:function(a){s=a}},margin:{get:function(){return h},set:function(a){h.top=void 0!=a.top?a.top:h.top,h.right=void 0!=a.right?a.right:h.right,h.bottom=void 0!=a.bottom?a.bottom:h.bottom,h.left=void 0!=a.left?a.left:h.left}},color:{get:function(){return x},set:function(b){x=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.cumulativeLineChart=function(){"use strict";function b(l){return I.reset(),I.models(f),s&&I.models(g),t&&I.models(h),l.each(function(l){function B(a,c){d3.select(b.container).style("cursor","ew-resize")}function F(a,b){H.x=d3.event.x,H.i=Math.round(G.invert(H.x)),L()}function I(a,c){d3.select(b.container).style("cursor","auto"),z.index=H.i,D.stateChange(z)}function L(){ba.data([H]);var a=b.duration();b.duration(0),b.update(),b.duration(a)}var M=d3.select(this);a.utils.initSVG(M),M.classed("nv-chart-"+y,!0);var N=a.utils.availableWidth(p,M,m),O=a.utils.availableHeight(q,M,m);if(b.update=function(){0===E?M.call(b):M.transition().duration(E).call(b)},b.container=this,z.setter(K(l),b.update).getter(J(l)).update(),z.disabled=l.map(function(a){return!!a.disabled}),!A){var P;A={};for(P in z)z[P]instanceof Array?A[P]=z[P].slice(0):A[P]=z[P]}var Q=d3.behavior.drag().on("dragstart",B).on("drag",F).on("dragend",I);if(!(l&&l.length&&l.filter(function(a){return a.values.length}).length))return a.utils.noData(b,M),b;if(M.selectAll(".nv-noData").remove(),d=f.xScale(),e=f.yScale(),x)f.yDomain(null);else{var R=l.filter(function(a){return!a.disabled}).map(function(a,b){var c=d3.extent(a.values,f.y());return c[0]<-.95&&(c[0]=-.95),[(c[0]-c[1])/(1+c[1]),(c[1]-c[0])/(1+c[0])]}),S=[d3.min(R,function(a){return a[0]}),d3.max(R,function(a){return a[1]})];f.yDomain(S)}G.domain([0,l[0].values.length-1]).range([0,N]).clamp(!0);var l=c(H.i,l),T=w?"none":"all",U=M.selectAll("g.nv-wrap.nv-cumulativeLine").data([l]),V=U.enter().append("g").attr("class","nvd3 nv-wrap nv-cumulativeLine").append("g"),W=U.select("g");if(V.append("g").attr("class","nv-interactive"),V.append("g").attr("class","nv-x nv-axis").style("pointer-events","none"),V.append("g").attr("class","nv-y 
nv-axis"),V.append("g").attr("class","nv-background"),V.append("g").attr("class","nv-linesWrap").style("pointer-events",T),V.append("g").attr("class","nv-avgLinesWrap").style("pointer-events","none"),V.append("g").attr("class","nv-legendWrap"),V.append("g").attr("class","nv-controlsWrap"),r?(i.width(N),W.select(".nv-legendWrap").datum(l).call(i),n||i.height()===m.top||(m.top=i.height(),O=a.utils.availableHeight(q,M,m)),W.select(".nv-legendWrap").attr("transform","translate(0,"+-m.top+")")):W.select(".nv-legendWrap").selectAll("*").remove(),v){var X=[{key:"Re-scale y-axis",disabled:!x}];j.width(140).color(["#444","#444","#444"]).rightAlign(!1).margin({top:5,right:0,bottom:5,left:20}),W.select(".nv-controlsWrap").datum(X).attr("transform","translate(0,"+-m.top+")").call(j)}else W.select(".nv-controlsWrap").selectAll("*").remove();U.attr("transform","translate("+m.left+","+m.top+")"),u&&W.select(".nv-y.nv-axis").attr("transform","translate("+N+",0)");var Y=l.filter(function(a){return a.tempDisabled});U.select(".tempDisabled").remove(),Y.length&&U.append("text").attr("class","tempDisabled").attr("x",N/2).attr("y","-.71em").style("text-anchor","end").text(Y.map(function(a){return a.key}).join(", ")+" values cannot be calculated for this time period."),w&&(k.width(N).height(O).margin({left:m.left,top:m.top}).svgContainer(M).xScale(d),U.select(".nv-interactive").call(k)),V.select(".nv-background").append("rect"),W.select(".nv-background rect").attr("width",N).attr("height",O),f.y(function(a){return a.display.y}).width(N).height(O).color(l.map(function(a,b){return a.color||o(a,b)}).filter(function(a,b){return!l[b].disabled&&!l[b].tempDisabled}));var Z=W.select(".nv-linesWrap").datum(l.filter(function(a){return!a.disabled&&!a.tempDisabled}));Z.call(f),l.forEach(function(a,b){a.seriesIndex=b});var $=l.filter(function(a){return!a.disabled&&!!C(a)}),_=W.select(".nv-avgLinesWrap").selectAll("line").data($,function(a){return a.key}),aa=function(a){var b=e(C(a));return 0>b?0:b>O?O:b};_.enter().append("line").style("stroke-width",2).style("stroke-dasharray","10,10").style("stroke",function(a,b){return f.color()(a,a.seriesIndex)}).attr("x1",0).attr("x2",N).attr("y1",aa).attr("y2",aa),_.style("stroke-opacity",function(a){var b=e(C(a));return 0>b||b>O?0:1}).attr("x1",0).attr("x2",N).attr("y1",aa).attr("y2",aa),_.exit().remove();var ba=Z.selectAll(".nv-indexLine").data([H]);ba.enter().append("rect").attr("class","nv-indexLine").attr("width",3).attr("x",-2).attr("fill","red").attr("fill-opacity",.5).style("pointer-events","all").call(Q),ba.attr("transform",function(a){return"translate("+G(a.i)+",0)"}).attr("height",O),s&&(g.scale(d)._ticks(a.utils.calcTicksX(N/70,l)).tickSize(-O,0),W.select(".nv-x.nv-axis").attr("transform","translate(0,"+e.range()[0]+")"),W.select(".nv-x.nv-axis").call(g)),t&&(h.scale(e)._ticks(a.utils.calcTicksY(O/36,l)).tickSize(-N,0),W.select(".nv-y.nv-axis").call(h)),W.select(".nv-background rect").on("click",function(){H.x=d3.mouse(this)[0],H.i=Math.round(G.invert(H.x)),z.index=H.i,D.stateChange(z),L()}),f.dispatch.on("elementClick",function(a){H.i=a.pointIndex,H.x=G(H.i),z.index=H.i,D.stateChange(z),L()}),j.dispatch.on("legendClick",function(a,c){a.disabled=!a.disabled,x=!a.disabled,z.rescaleY=x,D.stateChange(z),b.update()}),i.dispatch.on("stateChange",function(a){for(var c in a)z[c]=a[c];D.stateChange(z),b.update()}),k.dispatch.on("elementMousemove",function(c){f.clearHighlights();var d,e,i,j=[];if(l.filter(function(a,b){return 
a.seriesIndex=b,!a.disabled}).forEach(function(g,h){e=a.interactiveBisect(g.values,c.pointXValue,b.x()),f.highlightPoint(h,e,!0);var k=g.values[e];"undefined"!=typeof k&&("undefined"==typeof d&&(d=k),"undefined"==typeof i&&(i=b.xScale()(b.x()(k,e))),j.push({key:g.key,value:b.y()(k,e),color:o(g,g.seriesIndex)}))}),j.length>2){var m=b.yScale().invert(c.mouseY),n=Math.abs(b.yScale().domain()[0]-b.yScale().domain()[1]),p=.03*n,q=a.nearestValueIndex(j.map(function(a){return a.value}),m,p);null!==q&&(j[q].highlight=!0)}var r=g.tickFormat()(b.x()(d,e),e);k.tooltip.valueFormatter(function(a,b){return h.tickFormat()(a)}).data({value:r,series:j})(),k.renderGuideLine(i)}),k.dispatch.on("elementMouseout",function(a){f.clearHighlights()}),D.on("changeState",function(a){"undefined"!=typeof a.disabled&&(l.forEach(function(b,c){b.disabled=a.disabled[c]}),z.disabled=a.disabled),"undefined"!=typeof a.index&&(H.i=a.index,H.x=G(H.i),z.index=a.index,ba.data([H])),"undefined"!=typeof a.rescaleY&&(x=a.rescaleY),b.update()})}),I.renderEnd("cumulativeLineChart immediate"),b}function c(a,b){return L||(L=f.y()),b.map(function(b,c){if(!b.values)return b;var d=b.values[a];if(null==d)return b;var e=L(d,a);return-.95>e&&!F?(b.tempDisabled=!0,b):(b.tempDisabled=!1,b.values=b.values.map(function(a,b){return a.display={y:(L(a,b)-e)/(1+e)},a}),b)})}var d,e,f=a.models.line(),g=a.models.axis(),h=a.models.axis(),i=a.models.legend(),j=a.models.legend(),k=a.interactiveGuideline(),l=a.models.tooltip(),m={top:30,right:30,bottom:50,left:60},n=null,o=a.utils.defaultColor(),p=null,q=null,r=!0,s=!0,t=!0,u=!1,v=!0,w=!1,x=!0,y=f.id(),z=a.utils.state(),A=null,B=null,C=function(a){return a.average},D=d3.dispatch("stateChange","changeState","renderEnd"),E=250,F=!1;z.index=0,z.rescaleY=x,g.orient("bottom").tickPadding(7),h.orient(u?"right":"left"),l.valueFormatter(function(a,b){return h.tickFormat()(a,b)}).headerFormatter(function(a,b){return g.tickFormat()(a,b)}),j.updateState(!1);var G=d3.scale.linear(),H={i:0,x:0},I=a.utils.renderWatch(D,E),J=function(a){return function(){return{active:a.map(function(a){return!a.disabled}),index:H.i,rescaleY:x}}},K=function(a){return function(b){void 0!==b.index&&(H.i=b.index),void 0!==b.rescaleY&&(x=b.rescaleY),void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};f.dispatch.on("elementMouseover.tooltip",function(a){var c={x:b.x()(a.point),y:b.y()(a.point),color:a.point.color};a.point=c,l.data(a).hidden(!1)}),f.dispatch.on("elementMouseout.tooltip",function(a){l.hidden(!0)});var L=null;return b.dispatch=D,b.lines=f,b.legend=i,b.controls=j,b.xAxis=g,b.yAxis=h,b.interactiveLayer=k,b.state=z,b.tooltip=l,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return p},set:function(a){p=a}},height:{get:function(){return q},set:function(a){q=a}},rescaleY:{get:function(){return x},set:function(a){x=a}},showControls:{get:function(){return v},set:function(a){v=a}},showLegend:{get:function(){return r},set:function(a){r=a}},average:{get:function(){return C},set:function(a){C=a}},defaultState:{get:function(){return A},set:function(a){A=a}},noData:{get:function(){return B},set:function(a){B=a}},showXAxis:{get:function(){return s},set:function(a){s=a}},showYAxis:{get:function(){return t},set:function(a){t=a}},noErrorCheck:{get:function(){return F},set:function(a){F=a}},margin:{get:function(){return m},set:function(a){void 0!==a.top&&(m.top=a.top,n=a.top),m.right=void 0!==a.right?a.right:m.right,m.bottom=void 0!==a.bottom?a.bottom:m.bottom,m.left=void 
0!==a.left?a.left:m.left}},color:{get:function(){return o},set:function(b){o=a.utils.getColor(b),i.color(o)}},useInteractiveGuideline:{get:function(){return w},set:function(a){w=a,a===!0&&(b.interactive(!1),b.useVoronoi(!1))}},rightAlignYAxis:{get:function(){return u},set:function(a){u=a,h.orient(a?"right":"left")}},duration:{get:function(){return E},set:function(a){E=a,f.duration(E),g.duration(E),h.duration(E),I.reset(E)}}}),a.utils.inheritOptions(b,f),a.utils.initOptions(b),b},a.models.discreteBar=function(){"use strict";function b(m){return y.reset(),m.each(function(b){var m=k-j.left-j.right,x=l-j.top-j.bottom;c=d3.select(this),a.utils.initSVG(c),b.forEach(function(a,b){a.values.forEach(function(a){a.series=b})});var z=d&&e?[]:b.map(function(a){return a.values.map(function(a,b){return{x:p(a,b),y:q(a,b),y0:a.y0}})});n.domain(d||d3.merge(z).map(function(a){return a.x})).rangeBands(f||[0,m],.1),o.domain(e||d3.extent(d3.merge(z).map(function(a){return a.y}).concat(r))),t?o.range(g||[x-(o.domain()[0]<0?12:0),o.domain()[1]>0?12:0]):o.range(g||[x,0]),h=h||n,i=i||o.copy().range([o(0),o(0)]);var A=c.selectAll("g.nv-wrap.nv-discretebar").data([b]),B=A.enter().append("g").attr("class","nvd3 nv-wrap nv-discretebar"),C=B.append("g");A.select("g");C.append("g").attr("class","nv-groups"),A.attr("transform","translate("+j.left+","+j.top+")");var D=A.select(".nv-groups").selectAll(".nv-group").data(function(a){return a},function(a){return a.key});D.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),D.exit().watchTransition(y,"discreteBar: exit groups").style("stroke-opacity",1e-6).style("fill-opacity",1e-6).remove(),D.attr("class",function(a,b){return"nv-group nv-series-"+b}).classed("hover",function(a){return a.hover}),D.watchTransition(y,"discreteBar: groups").style("stroke-opacity",1).style("fill-opacity",.75);var E=D.selectAll("g.nv-bar").data(function(a){return a.values});E.exit().remove();var F=E.enter().append("g").attr("transform",function(a,b,c){return"translate("+(n(p(a,b))+.05*n.rangeBand())+", "+o(0)+")"}).on("mouseover",function(a,b){d3.select(this).classed("hover",!0),v.elementMouseover({data:a,index:b,color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1),v.elementMouseout({data:a,index:b,color:d3.select(this).style("fill")})}).on("mousemove",function(a,b){v.elementMousemove({data:a,index:b,color:d3.select(this).style("fill")})}).on("click",function(a,b){var c=this;v.elementClick({data:a,index:b,color:d3.select(this).style("fill"),event:d3.event,element:c}),d3.event.stopPropagation()}).on("dblclick",function(a,b){v.elementDblClick({data:a,index:b,color:d3.select(this).style("fill")}),d3.event.stopPropagation()});F.append("rect").attr("height",0).attr("width",.9*n.rangeBand()/b.length),t?(F.append("text").attr("text-anchor","middle"),E.select("text").text(function(a,b){return u(q(a,b))}).watchTransition(y,"discreteBar: bars text").attr("x",.9*n.rangeBand()/2).attr("y",function(a,b){return q(a,b)<0?o(q(a,b))-o(0)+12:-4})):E.selectAll("text").remove(),E.attr("class",function(a,b){return q(a,b)<0?"nv-bar negative":"nv-bar positive"}).style("fill",function(a,b){return a.color||s(a,b)}).style("stroke",function(a,b){return a.color||s(a,b)}).select("rect").attr("class",w).watchTransition(y,"discreteBar: bars rect").attr("width",.9*n.rangeBand()/b.length),E.watchTransition(y,"discreteBar: bars").attr("transform",function(a,b){var 
c=n(p(a,b))+.05*n.rangeBand(),d=q(a,b)<0?o(0):o(0)-o(q(a,b))<1?o(0)-1:o(q(a,b));return"translate("+c+", "+d+")"}).select("rect").attr("height",function(a,b){return Math.max(Math.abs(o(q(a,b))-o(0)),1)}),h=n.copy(),i=o.copy()}),y.renderEnd("discreteBar immediate"),b}var c,d,e,f,g,h,i,j={top:0,right:0,bottom:0,left:0},k=960,l=500,m=Math.floor(1e4*Math.random()),n=d3.scale.ordinal(),o=d3.scale.linear(),p=function(a){return a.x},q=function(a){return a.y},r=[0],s=a.utils.defaultColor(),t=!1,u=d3.format(",.2f"),v=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove","renderEnd"),w="discreteBar",x=250,y=a.utils.renderWatch(v,x);return b.dispatch=v,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return k},set:function(a){k=a}},height:{get:function(){return l},set:function(a){l=a}},forceY:{get:function(){return r},set:function(a){r=a;
+}},showValues:{get:function(){return t},set:function(a){t=a}},x:{get:function(){return p},set:function(a){p=a}},y:{get:function(){return q},set:function(a){q=a}},xScale:{get:function(){return n},set:function(a){n=a}},yScale:{get:function(){return o},set:function(a){o=a}},xDomain:{get:function(){return d},set:function(a){d=a}},yDomain:{get:function(){return e},set:function(a){e=a}},xRange:{get:function(){return f},set:function(a){f=a}},yRange:{get:function(){return g},set:function(a){g=a}},valueFormat:{get:function(){return u},set:function(a){u=a}},id:{get:function(){return m},set:function(a){m=a}},rectClass:{get:function(){return w},set:function(a){w=a}},margin:{get:function(){return j},set:function(a){j.top=void 0!==a.top?a.top:j.top,j.right=void 0!==a.right?a.right:j.right,j.bottom=void 0!==a.bottom?a.bottom:j.bottom,j.left=void 0!==a.left?a.left:j.left}},color:{get:function(){return s},set:function(b){s=a.utils.getColor(b)}},duration:{get:function(){return x},set:function(a){x=a,y.reset(x)}}}),a.utils.initOptions(b),b},a.models.discreteBarChart=function(){"use strict";function b(i){return y.reset(),y.models(e),p&&y.models(f),q&&y.models(g),i.each(function(i){var n=d3.select(this);a.utils.initSVG(n);var v=a.utils.availableWidth(l,n,j),y=a.utils.availableHeight(m,n,j);if(b.update=function(){w.beforeUpdate(),n.transition().duration(x).call(b)},b.container=this,!(i&&i.length&&i.filter(function(a){return a.values.length}).length))return a.utils.noData(b,n),b;n.selectAll(".nv-noData").remove(),c=e.xScale(),d=e.yScale().clamp(!0);var z=n.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([i]),A=z.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),B=A.append("defs"),C=z.select("g");A.append("g").attr("class","nv-x nv-axis"),A.append("g").attr("class","nv-y nv-axis").append("g").attr("class","nv-zeroLine").append("line"),A.append("g").attr("class","nv-barsWrap"),A.append("g").attr("class","nv-legendWrap"),C.attr("transform","translate("+j.left+","+j.top+")"),o?(h.width(v),C.select(".nv-legendWrap").datum(i).call(h),k||h.height()===j.top||(j.top=h.height(),y=a.utils.availableHeight(m,n,j)),z.select(".nv-legendWrap").attr("transform","translate(0,"+-j.top+")")):C.select(".nv-legendWrap").selectAll("*").remove(),r&&C.select(".nv-y.nv-axis").attr("transform","translate("+v+",0)"),e.width(v).height(y);var D=C.select(".nv-barsWrap").datum(i.filter(function(a){return!a.disabled}));if(D.transition().call(e),B.append("clipPath").attr("id","nv-x-label-clip-"+e.id()).append("rect"),C.select("#nv-x-label-clip-"+e.id()+" rect").attr("width",c.rangeBand()*(s?2:1)).attr("height",16).attr("x",-c.rangeBand()/(s?1:2)),p){f.scale(c)._ticks(a.utils.calcTicksX(v/100,i)).tickSize(-y,0),C.select(".nv-x.nv-axis").attr("transform","translate(0,"+(d.range()[0]+(e.showValues()&&d.domain()[0]<0?16:0))+")"),C.select(".nv-x.nv-axis").call(f);var E=C.select(".nv-x.nv-axis").selectAll("g");s&&E.selectAll("text").attr("transform",function(a,b,c){return"translate(0,"+(c%2==0?"5":"17")+")"}),u&&E.selectAll(".tick text").attr("transform","rotate("+u+" 0,0)").style("text-anchor",u>0?"start":"end"),t&&C.selectAll(".tick text").call(a.utils.wrapTicks,b.xAxis.rangeBand())}q&&(g.scale(d)._ticks(a.utils.calcTicksY(y/36,i)).tickSize(-v,0),C.select(".nv-y.nv-axis").call(g)),C.select(".nv-zeroLine line").attr("x1",0).attr("x2",r?-v:v).attr("y1",d(0)).attr("y2",d(0))}),y.renderEnd("discreteBar chart immediate"),b}var 
c,d,e=a.models.discreteBar(),f=a.models.axis(),g=a.models.axis(),h=a.models.legend(),i=a.models.tooltip(),j={top:15,right:10,bottom:50,left:60},k=null,l=null,m=null,n=a.utils.getColor(),o=!1,p=!0,q=!0,r=!1,s=!1,t=!1,u=0,v=null,w=d3.dispatch("beforeUpdate","renderEnd"),x=250;f.orient("bottom").showMaxMin(!1).tickFormat(function(a){return a}),g.orient(r?"right":"left").tickFormat(d3.format(",.1f")),i.duration(0).headerEnabled(!1).valueFormatter(function(a,b){return g.tickFormat()(a,b)}).keyFormatter(function(a,b){return f.tickFormat()(a,b)});var y=a.utils.renderWatch(w,x);return e.dispatch.on("elementMouseover.tooltip",function(a){a.series={key:b.x()(a.data),value:b.y()(a.data),color:a.color},i.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){i.hidden(!0)}),e.dispatch.on("elementMousemove.tooltip",function(a){i()}),b.dispatch=w,b.discretebar=e,b.legend=h,b.xAxis=f,b.yAxis=g,b.tooltip=i,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return l},set:function(a){l=a}},height:{get:function(){return m},set:function(a){m=a}},showLegend:{get:function(){return o},set:function(a){o=a}},staggerLabels:{get:function(){return s},set:function(a){s=a}},rotateLabels:{get:function(){return u},set:function(a){u=a}},wrapLabels:{get:function(){return t},set:function(a){t=!!a}},showXAxis:{get:function(){return p},set:function(a){p=a}},showYAxis:{get:function(){return q},set:function(a){q=a}},noData:{get:function(){return v},set:function(a){v=a}},margin:{get:function(){return j},set:function(a){void 0!==a.top&&(j.top=a.top,k=a.top),j.right=void 0!==a.right?a.right:j.right,j.bottom=void 0!==a.bottom?a.bottom:j.bottom,j.left=void 0!==a.left?a.left:j.left}},duration:{get:function(){return x},set:function(a){x=a,y.reset(x),e.duration(x),f.duration(x),g.duration(x)}},color:{get:function(){return n},set:function(b){n=a.utils.getColor(b),e.color(n),h.color(n)}},rightAlignYAxis:{get:function(){return r},set:function(a){r=a,g.orient(a?"right":"left")}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.distribution=function(){"use strict";function b(k){return m.reset(),k.each(function(b){var k=(e-("x"===g?d.left+d.right:d.top+d.bottom),"x"==g?"y":"x"),l=d3.select(this);a.utils.initSVG(l),c=c||j;var n=l.selectAll("g.nv-distribution").data([b]),o=n.enter().append("g").attr("class","nvd3 nv-distribution"),p=(o.append("g"),n.select("g"));n.attr("transform","translate("+d.left+","+d.top+")");var q=p.selectAll("g.nv-dist").data(function(a){return a},function(a){return a.key});q.enter().append("g"),q.attr("class",function(a,b){return"nv-dist nv-series-"+b}).style("stroke",function(a,b){return i(a,b)});var r=q.selectAll("line.nv-dist"+g).data(function(a){return a.values});r.enter().append("line").attr(g+"1",function(a,b){return c(h(a,b))}).attr(g+"2",function(a,b){return c(h(a,b))}),m.transition(q.exit().selectAll("line.nv-dist"+g),"dist exit").attr(g+"1",function(a,b){return j(h(a,b))}).attr(g+"2",function(a,b){return j(h(a,b))}).style("stroke-opacity",0).remove(),r.attr("class",function(a,b){return"nv-dist"+g+" nv-dist"+g+"-"+b}).attr(k+"1",0).attr(k+"2",f),m.transition(r,"dist").attr(g+"1",function(a,b){return j(h(a,b))}).attr(g+"2",function(a,b){return j(h(a,b))}),c=j.copy()}),m.renderEnd("distribution immediate"),b}var c,d={top:0,right:0,bottom:0,left:0},e=400,f=8,g="x",h=function(a){return a[g]},i=a.utils.defaultColor(),j=d3.scale.linear(),k=250,l=d3.dispatch("renderEnd"),m=a.utils.renderWatch(l,k);return 
b.options=a.utils.optionsFunc.bind(b),b.dispatch=l,b.margin=function(a){return arguments.length?(d.top="undefined"!=typeof a.top?a.top:d.top,d.right="undefined"!=typeof a.right?a.right:d.right,d.bottom="undefined"!=typeof a.bottom?a.bottom:d.bottom,d.left="undefined"!=typeof a.left?a.left:d.left,b):d},b.width=function(a){return arguments.length?(e=a,b):e},b.axis=function(a){return arguments.length?(g=a,b):g},b.size=function(a){return arguments.length?(f=a,b):f},b.getData=function(a){return arguments.length?(h=d3.functor(a),b):h},b.scale=function(a){return arguments.length?(j=a,b):j},b.color=function(c){return arguments.length?(i=a.utils.getColor(c),b):i},b.duration=function(a){return arguments.length?(k=a,m.reset(k),b):k},b},a.models.focus=function(b){"use strict";function c(u){return t.reset(),t.models(b),m&&t.models(f),n&&t.models(g),u.each(function(t){function u(a){var b=+("e"==a),c=b?1:-1,d=z/3;return"M"+.5*c+","+d+"A6,6 0 0 "+b+" "+6.5*c+","+(d+6)+"V"+(2*d-6)+"A6,6 0 0 "+b+" "+.5*c+","+2*d+"ZM"+2.5*c+","+(d+8)+"V"+(2*d-8)+"M"+4.5*c+","+(d+8)+"V"+(2*d-8)}function v(){h.empty()||h.extent(p),E.data([h.empty()?d.domain():p]).each(function(a,b){var c=d(a[0])-d.range()[0],e=y-d(a[1]);d3.select(this).select(".left").attr("width",0>c?0:c),d3.select(this).select(".right").attr("x",d(a[1])).attr("width",0>e?0:e)})}function w(a){p=h.empty()?null:h.extent();var b=h.empty()?d.domain():h.extent();r.brush({extent:b,brush:h}),v(),a&&r.onBrush(b)}var x=d3.select(this);a.utils.initSVG(x);var y=a.utils.availableWidth(k,x,i),z=l-i.top-i.bottom;c.update=function(){0===q?x.call(c):x.transition().duration(q).call(c)},c.container=this,d=b.xScale(),e=b.yScale();var A=x.selectAll("g.nv-focus").data([t]),B=A.enter().append("g").attr("class","nvd3 nv-focus").append("g"),C=A.select("g");A.attr("transform","translate("+i.left+","+i.top+")"),B.append("g").attr("class","nv-background").append("rect"),B.append("g").attr("class","nv-x nv-axis"),B.append("g").attr("class","nv-y nv-axis"),B.append("g").attr("class","nv-contentWrap"),B.append("g").attr("class","nv-brushBackground"),B.append("g").attr("class","nv-x nv-brush"),o&&C.select(".nv-y.nv-axis").attr("transform","translate("+y+",0)"),C.select(".nv-background rect").attr("width",y).attr("height",z),b.width(y).height(z).color(t.map(function(a,b){return a.color||j(a,b)}).filter(function(a,b){return!t[b].disabled}));var D=C.select(".nv-contentWrap").datum(t.filter(function(a){return!a.disabled}));d3.transition(D).call(b),h.x(d).on("brush",function(){w(s)}),h.on("brushend",function(){s||r.onBrush(h.empty()?d.domain():h.extent())}),p&&h.extent(p);var E=C.select(".nv-brushBackground").selectAll("g").data([p||h.extent()]),F=E.enter().append("g");F.append("rect").attr("class","left").attr("x",0).attr("y",0).attr("height",z),F.append("rect").attr("class","right").attr("x",0).attr("y",0).attr("height",z);var G=C.select(".nv-x.nv-brush").call(h);G.selectAll("rect").attr("height",z),G.selectAll(".resize").append("path").attr("d",u),w(!0),C.select(".nv-background rect").attr("width",y).attr("height",z),m&&(f.scale(d)._ticks(a.utils.calcTicksX(y/100,t)).tickSize(-z,0),C.select(".nv-x.nv-axis").attr("transform","translate(0,"+e.range()[0]+")"),d3.transition(C.select(".nv-x.nv-axis")).call(f)),n&&(g.scale(e)._ticks(a.utils.calcTicksY(z/36,t)).tickSize(-y,0),d3.transition(C.select(".nv-y.nv-axis")).call(g)),C.select(".nv-x.nv-axis").attr("transform","translate(0,"+e.range()[0]+")")}),t.renderEnd("focus immediate"),c}var 
d,e,b=b||a.models.line(),f=a.models.axis(),g=a.models.axis(),h=d3.svg.brush(),i={top:10,right:0,bottom:30,left:0},j=a.utils.defaultColor(),k=null,l=70,m=!0,n=!1,o=!1,p=null,q=250,r=d3.dispatch("brush","onBrush","renderEnd"),s=!0;b.interactive(!1),b.pointActive(function(a){return!1});var t=a.utils.renderWatch(r,q);return c.dispatch=r,c.content=b,c.brush=h,c.xAxis=f,c.yAxis=g,c.options=a.utils.optionsFunc.bind(c),c._options=Object.create({},{width:{get:function(){return k},set:function(a){k=a}},height:{get:function(){return l},set:function(a){l=a}},showXAxis:{get:function(){return m},set:function(a){m=a}},showYAxis:{get:function(){return n},set:function(a){n=a}},brushExtent:{get:function(){return p},set:function(a){p=a}},syncBrushing:{get:function(){return s},set:function(a){s=a}},margin:{get:function(){return i},set:function(a){i.top=void 0!==a.top?a.top:i.top,i.right=void 0!==a.right?a.right:i.right,i.bottom=void 0!==a.bottom?a.bottom:i.bottom,i.left=void 0!==a.left?a.left:i.left}},duration:{get:function(){return q},set:function(a){q=a,t.reset(q),b.duration(q),f.duration(q),g.duration(q)}},color:{get:function(){return j},set:function(c){j=a.utils.getColor(c),b.color(j)}},interpolate:{get:function(){return b.interpolate()},set:function(a){b.interpolate(a)}},xTickFormat:{get:function(){return f.tickFormat()},set:function(a){f.tickFormat(a)}},yTickFormat:{get:function(){return g.tickFormat()},set:function(a){g.tickFormat(a)}},x:{get:function(){return b.x()},set:function(a){b.x(a)}},y:{get:function(){return b.y()},set:function(a){b.y(a)}},rightAlignYAxis:{get:function(){return o},set:function(a){o=a,g.orient(o?"right":"left")}}}),a.utils.inheritOptions(c,b),a.utils.initOptions(c),c},a.models.forceDirectedGraph=function(){"use strict";function b(g){return u.reset(),g.each(function(g){f=d3.select(this),a.utils.initSVG(f);var j=a.utils.availableWidth(d,f,c),u=a.utils.availableHeight(e,f,c);if(f.attr("width",j).attr("height",u),!(g&&g.links&&g.nodes))return a.utils.noData(b,f),b;f.selectAll(".nv-noData").remove(),f.selectAll("*").remove();var v=new Set;g.nodes.forEach(function(a){var b=Object.keys(a);b.forEach(function(a){v.add(a)})});var w=d3.layout.force().nodes(g.nodes).links(g.links).size([j,u]).linkStrength(k).friction(l).linkDistance(m).charge(n).gravity(o).theta(p).alpha(q).start(),x=f.selectAll(".link").data(g.links).enter().append("line").attr("class","nv-force-link").style("stroke-width",function(a){return Math.sqrt(a.value)}),y=f.selectAll(".node").data(g.nodes).enter().append("g").attr("class","nv-force-node").call(w.drag);y.append("circle").attr("r",r).style("fill",function(a){return h(a)}).on("mouseover",function(a){f.select(".nv-series-"+a.seriesIndex+" .nv-distx-"+a.pointIndex).attr("y1",a.py),f.select(".nv-series-"+a.seriesIndex+" .nv-disty-"+a.pointIndex).attr("x2",a.px);var b=h(a);a.series=[],v.forEach(function(c){a.series.push({color:b,key:c,value:a[c]})}),i.data(a).hidden(!1)}).on("mouseout",function(a){i.hidden(!0)}),i.headerFormatter(function(a){return"Node"}),t(x),s(y),w.on("tick",function(){x.attr("x1",function(a){return a.source.x}).attr("y1",function(a){return a.source.y}).attr("x2",function(a){return a.target.x}).attr("y2",function(a){return a.target.y}),y.attr("transform",function(a){return"translate("+a.x+", "+a.y+")"})})}),b}var c={top:2,right:0,bottom:2,left:0},d=400,e=32,f=null,g=d3.dispatch("renderEnd"),h=a.utils.getColor(["#000"]),i=a.models.tooltip(),j=null,k=.1,l=.9,m=30,n=-120,o=.1,p=.8,q=.1,r=5,s=function(a){},t=function(a){},u=a.utils.renderWatch(g);return 
b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return d},set:function(a){d=a}},height:{get:function(){return e},set:function(a){e=a}},linkStrength:{get:function(){return k},set:function(a){k=a}},friction:{get:function(){return l},set:function(a){l=a}},linkDist:{get:function(){return m},set:function(a){m=a}},charge:{get:function(){return n},set:function(a){n=a}},gravity:{get:function(){return o},set:function(a){o=a}},theta:{get:function(){return p},set:function(a){p=a}},alpha:{get:function(){return q},set:function(a){q=a}},radius:{get:function(){return r},set:function(a){r=a}},x:{get:function(){return getX},set:function(a){getX=d3.functor(a)}},y:{get:function(){return getY},set:function(a){getY=d3.functor(a)}},margin:{get:function(){return c},set:function(a){c.top=void 0!==a.top?a.top:c.top,c.right=void 0!==a.right?a.right:c.right,c.bottom=void 0!==a.bottom?a.bottom:c.bottom,c.left=void 0!==a.left?a.left:c.left}},color:{get:function(){return h},set:function(b){h=a.utils.getColor(b)}},noData:{get:function(){return j},set:function(a){j=a}},nodeExtras:{get:function(){return s},set:function(a){s=a}},linkExtras:{get:function(){return t},set:function(a){t=a}}}),b.dispatch=g,b.tooltip=i,a.utils.initOptions(b),b},a.models.furiousLegend=function(){"use strict";function b(r){function s(a,b){return"furious"!=q?"#000":o?a.disengaged?h(a,b):"#fff":o?void 0:a.disabled?h(a,b):"#fff"}function t(a,b){return o&&"furious"==q?a.disengaged?"#fff":h(a,b):a.disabled?"#fff":h(a,b)}return r.each(function(b){var r=d-c.left-c.right,u=d3.select(this);a.utils.initSVG(u);var v=u.selectAll("g.nv-legend").data([b]),w=(v.enter().append("g").attr("class","nvd3 nv-legend").append("g"),v.select("g"));v.attr("transform","translate("+c.left+","+c.top+")");var x,y=w.selectAll(".nv-series").data(function(a){return"furious"!=q?a:a.filter(function(a){return o?!0:!a.disengaged})}),z=y.enter().append("g").attr("class","nv-series");if("classic"==q)z.append("circle").style("stroke-width",2).attr("class","nv-legend-symbol").attr("r",5),x=y.select("circle");else if("furious"==q){z.append("rect").style("stroke-width",2).attr("class","nv-legend-symbol").attr("rx",3).attr("ry",3),x=y.select("rect"),z.append("g").attr("class","nv-check-box").property("innerHTML",'<path d="M0.5,5 L22.5,5 L22.5,26.5 L0.5,26.5 L0.5,5 Z" class="nv-box"></path><path d="M5.5,12.8618467 L11.9185089,19.2803556 L31,0.198864511" class="nv-check"></path>').attr("transform","translate(-10,-8)scale(0.5)");var A=y.select(".nv-check-box");A.each(function(a,b){d3.select(this).selectAll("path").attr("stroke",s(a,b))})}z.append("text").attr("text-anchor","start").attr("class","nv-legend-text").attr("dy",".32em").attr("dx","8");var B=y.select("text.nv-legend-text");y.on("mouseover",function(a,b){p.legendMouseover(a,b)}).on("mouseout",function(a,b){p.legendMouseout(a,b)}).on("click",function(a,b){p.legendClick(a,b);var c=y.data();if(m){if("classic"==q)n?(c.forEach(function(a){a.disabled=!0}),a.disabled=!1):(a.disabled=!a.disabled,c.every(function(a){return a.disabled})&&c.forEach(function(a){a.disabled=!1}));else if("furious"==q)if(o)a.disengaged=!a.disengaged,a.userDisabled=void 0==a.userDisabled?!!a.disabled:a.userDisabled,a.disabled=a.disengaged||a.userDisabled;else if(!o){a.disabled=!a.disabled,a.userDisabled=a.disabled;var d=c.filter(function(a){return!a.disengaged});d.every(function(a){return 
a.userDisabled})&&c.forEach(function(a){a.disabled=a.userDisabled=!1})}p.stateChange({disabled:c.map(function(a){return!!a.disabled}),disengaged:c.map(function(a){return!!a.disengaged})})}}).on("dblclick",function(a,b){if(("furious"!=q||!o)&&(p.legendDblclick(a,b),m)){var c=y.data();c.forEach(function(a){a.disabled=!0,"furious"==q&&(a.userDisabled=a.disabled)}),a.disabled=!1,"furious"==q&&(a.userDisabled=a.disabled),p.stateChange({disabled:c.map(function(a){return!!a.disabled})})}}),y.classed("nv-disabled",function(a){return a.userDisabled}),y.exit().remove(),B.attr("fill",s).text(function(a){return g(f(a))});var C;switch(q){case"furious":C=23;break;case"classic":C=20}if(j){var D=[];y.each(function(b,c){var d;if(g(f(b))&&g(f(b)).length>i){var e=g(f(b)).substring(0,i);d=d3.select(this).select("text").text(e+"..."),d3.select(this).append("svg:title").text(g(f(b)))}else d=d3.select(this).select("text");var h;try{if(h=d.node().getComputedTextLength(),0>=h)throw Error()}catch(j){h=a.utils.calcApproxTextWidth(d)}D.push(h+k)});for(var E=0,F=0,G=[];r>F&&E<D.length;)G[E]=D[E],F+=D[E++];for(0===E&&(E=1);F>r&&E>1;){G=[],E--;for(var H=0;H<D.length;H++)D[H]>(G[H%E]||0)&&(G[H%E]=D[H]);F=G.reduce(function(a,b,c,d){return a+b})}for(var I=[],J=0,K=0;E>J;J++)I[J]=K,K+=G[J];y.attr("transform",function(a,b){return"translate("+I[b%E]+","+(5+Math.floor(b/E)*C)+")"}),l?w.attr("transform","translate("+(d-c.right-F)+","+c.top+")"):w.attr("transform","translate(0,"+c.top+")"),e=c.top+c.bottom+Math.ceil(D.length/E)*C}else{var L,M=5,N=5,O=0;y.attr("transform",function(a,b){var e=d3.select(this).select("text").node().getComputedTextLength()+k;return L=N,d<c.left+c.right+L+e&&(N=L=5,M+=C),N+=e,N>O&&(O=N),"translate("+L+","+M+")"}),w.attr("transform","translate("+(d-c.right-O)+","+c.top+")"),e=c.top+c.bottom+M+15}"furious"==q&&x.attr("width",function(a,b){return B[0][b].getComputedTextLength()+27}).attr("height",18).attr("y",-9).attr("x",-15),x.style("fill",t).style("stroke",function(a,b){return a.color||h(a,b)})}),b}var c={top:5,right:0,bottom:5,left:0},d=400,e=20,f=function(a){return a.key},g=function(a){return a},h=a.utils.getColor(),i=20,j=!0,k=28,l=!0,m=!0,n=!1,o=!1,p=d3.dispatch("legendClick","legendDblclick","legendMouseover","legendMouseout","stateChange"),q="classic";return b.dispatch=p,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return d},set:function(a){d=a}},height:{get:function(){return e},set:function(a){e=a}},key:{get:function(){return f},set:function(a){f=a}},keyFormatter:{get:function(){return g},set:function(a){g=a}},align:{get:function(){return j},set:function(a){j=a}},rightAlign:{get:function(){return l},set:function(a){l=a}},maxKeyLength:{get:function(){return i},set:function(a){i=a}},padding:{get:function(){return k},set:function(a){k=a}},updateState:{get:function(){return m},set:function(a){m=a}},radioButtonMode:{get:function(){return n},set:function(a){n=a}},expanded:{get:function(){return o},set:function(a){o=a}},vers:{get:function(){return q},set:function(a){q=a}},margin:{get:function(){return c},set:function(a){c.top=void 0!==a.top?a.top:c.top,c.right=void 0!==a.right?a.right:c.right,c.bottom=void 0!==a.bottom?a.bottom:c.bottom,c.left=void 0!==a.left?a.left:c.left}},color:{get:function(){return h},set:function(b){h=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.historicalBar=function(){"use strict";function b(x){return x.each(function(b){w.reset(),k=d3.select(this);var 
x=a.utils.availableWidth(h,k,g),y=a.utils.availableHeight(i,k,g);a.utils.initSVG(k),l.domain(c||d3.extent(b[0].values.map(n).concat(p))),r?l.range(e||[.5*x/b[0].values.length,x*(b[0].values.length-.5)/b[0].values.length]):l.range(e||[0,x]),m.domain(d||d3.extent(b[0].values.map(o).concat(q))).range(f||[y,0]),l.domain()[0]===l.domain()[1]&&(l.domain()[0]?l.domain([l.domain()[0]-.01*l.domain()[0],l.domain()[1]+.01*l.domain()[1]]):l.domain([-1,1])),m.domain()[0]===m.domain()[1]&&(m.domain()[0]?m.domain([m.domain()[0]+.01*m.domain()[0],m.domain()[1]-.01*m.domain()[1]]):m.domain([-1,1]));var z=k.selectAll("g.nv-wrap.nv-historicalBar-"+j).data([b[0].values]),A=z.enter().append("g").attr("class","nvd3 nv-wrap nv-historicalBar-"+j),B=A.append("defs"),C=A.append("g"),D=z.select("g");C.append("g").attr("class","nv-bars"),z.attr("transform","translate("+g.left+","+g.top+")"),k.on("click",function(a,b){u.chartClick({data:a,index:b,pos:d3.event,id:j})}),B.append("clipPath").attr("id","nv-chart-clip-path-"+j).append("rect"),z.select("#nv-chart-clip-path-"+j+" rect").attr("width",x).attr("height",y),D.attr("clip-path",s?"url(#nv-chart-clip-path-"+j+")":"");var E=z.select(".nv-bars").selectAll(".nv-bar").data(function(a){return a},function(a,b){return n(a,b)});E.exit().remove(),E.enter().append("rect").attr("x",0).attr("y",function(b,c){return a.utils.NaNtoZero(m(Math.max(0,o(b,c))))}).attr("height",function(b,c){return a.utils.NaNtoZero(Math.abs(m(o(b,c))-m(0)))}).attr("transform",function(a,c){return"translate("+(l(n(a,c))-x/b[0].values.length*.45)+",0)"}).on("mouseover",function(a,b){v&&(d3.select(this).classed("hover",!0),u.elementMouseover({data:a,index:b,color:d3.select(this).style("fill")}))}).on("mouseout",function(a,b){v&&(d3.select(this).classed("hover",!1),u.elementMouseout({data:a,index:b,color:d3.select(this).style("fill")}))}).on("mousemove",function(a,b){v&&u.elementMousemove({data:a,index:b,color:d3.select(this).style("fill")})}).on("click",function(a,b){if(v){var c=this;u.elementClick({data:a,index:b,color:d3.select(this).style("fill"),event:d3.event,element:c}),d3.event.stopPropagation()}}).on("dblclick",function(a,b){v&&(u.elementDblClick({data:a,index:b,color:d3.select(this).style("fill")}),d3.event.stopPropagation())}),E.attr("fill",function(a,b){return t(a,b)}).attr("class",function(a,b,c){return(o(a,b)<0?"nv-bar negative":"nv-bar positive")+" nv-bar-"+c+"-"+b}).watchTransition(w,"bars").attr("transform",function(a,c){return"translate("+(l(n(a,c))-x/b[0].values.length*.45)+",0)"}).attr("width",x/b[0].values.length*.9),E.watchTransition(w,"bars").attr("y",function(b,c){var d=o(b,c)<0?m(0):m(0)-m(o(b,c))<1?m(0)-1:m(o(b,c));return a.utils.NaNtoZero(d)}).attr("height",function(b,c){return a.utils.NaNtoZero(Math.max(Math.abs(m(o(b,c))-m(0)),1))})}),w.renderEnd("historicalBar immediate"),b}var c,d,e,f,g={top:0,right:0,bottom:0,left:0},h=null,i=null,j=Math.floor(1e4*Math.random()),k=null,l=d3.scale.linear(),m=d3.scale.linear(),n=function(a){return a.x},o=function(a){return a.y},p=[],q=[0],r=!1,s=!0,t=a.utils.defaultColor(),u=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove","renderEnd"),v=!0,w=a.utils.renderWatch(u,0);return b.highlightPoint=function(a,b){k.select(".nv-bars .nv-bar-0-"+a).classed("hover",b)},b.clearHighlights=function(){k.select(".nv-bars .nv-bar.hover").classed("hover",!1)},b.dispatch=u,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return 
h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},forceX:{get:function(){return p},set:function(a){p=a}},forceY:{get:function(){return q},set:function(a){q=a}},padData:{get:function(){return r},set:function(a){r=a}},x:{get:function(){return n},set:function(a){n=a}},y:{get:function(){return o},set:function(a){o=a}},xScale:{get:function(){return l},set:function(a){l=a}},yScale:{get:function(){return m},set:function(a){m=a}},xDomain:{get:function(){return c},set:function(a){c=a}},yDomain:{get:function(){return d},set:function(a){d=a}},xRange:{get:function(){return e},set:function(a){e=a}},yRange:{get:function(){return f},set:function(a){f=a}},clipEdge:{get:function(){return s},set:function(a){s=a}},id:{get:function(){return j},set:function(a){j=a}},interactive:{get:function(){return v},set:function(a){v=a}},margin:{get:function(){return g},set:function(a){g.top=void 0!==a.top?a.top:g.top,g.right=void 0!==a.right?a.right:g.right,g.bottom=void 0!==a.bottom?a.bottom:g.bottom,g.left=void 0!==a.left?a.left:g.left}},color:{get:function(){return t},set:function(b){t=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.historicalBarChart=function(b){"use strict";function c(b){return b.each(function(k){A.reset(),A.models(f),r&&A.models(g),s&&A.models(h);var x=d3.select(this);a.utils.initSVG(x);var B=a.utils.availableWidth(o,x,l),C=a.utils.availableHeight(p,x,l);if(c.update=function(){x.transition().duration(z).call(c)},c.container=this,v.disabled=k.map(function(a){return!!a.disabled}),!w){var D;w={};for(D in v)v[D]instanceof Array?w[D]=v[D].slice(0):w[D]=v[D]}if(!(k&&k.length&&k.filter(function(a){return a.values.length}).length))return a.utils.noData(c,x),c;x.selectAll(".nv-noData").remove(),d=f.xScale(),e=f.yScale();var E=x.selectAll("g.nv-wrap.nv-historicalBarChart").data([k]),F=E.enter().append("g").attr("class","nvd3 nv-wrap nv-historicalBarChart").append("g"),G=E.select("g");F.append("g").attr("class","nv-x nv-axis"),F.append("g").attr("class","nv-y nv-axis"),F.append("g").attr("class","nv-barsWrap"),F.append("g").attr("class","nv-legendWrap"),F.append("g").attr("class","nv-interactive"),q?(i.width(B),G.select(".nv-legendWrap").datum(k).call(i),m||i.height()===l.top||(l.top=i.height(),C=a.utils.availableHeight(p,x,l)),E.select(".nv-legendWrap").attr("transform","translate(0,"+-l.top+")")):G.select(".nv-legendWrap").selectAll("*").remove(),E.attr("transform","translate("+l.left+","+l.top+")"),t&&G.select(".nv-y.nv-axis").attr("transform","translate("+B+",0)"),u&&(j.width(B).height(C).margin({left:l.left,top:l.top}).svgContainer(x).xScale(d),E.select(".nv-interactive").call(j)),f.width(B).height(C).color(k.map(function(a,b){return a.color||n(a,b)}).filter(function(a,b){return!k[b].disabled}));var H=G.select(".nv-barsWrap").datum(k.filter(function(a){return!a.disabled}));H.transition().call(f),r&&(g.scale(d)._ticks(a.utils.calcTicksX(B/100,k)).tickSize(-C,0),G.select(".nv-x.nv-axis").attr("transform","translate(0,"+e.range()[0]+")"),G.select(".nv-x.nv-axis").transition().call(g)),s&&(h.scale(e)._ticks(a.utils.calcTicksY(C/36,k)).tickSize(-B,0),G.select(".nv-y.nv-axis").transition().call(h)),j.dispatch.on("elementMousemove",function(b){f.clearHighlights();var d,e,i,l=[];k.filter(function(a,b){return a.seriesIndex=b,!a.disabled}).forEach(function(g,h){e=a.interactiveBisect(g.values,b.pointXValue,c.x()),f.highlightPoint(e,!0);var j=g.values[e];void 0!==j&&(void 0===d&&(d=j),void 
0===i&&(i=c.xScale()(c.x()(j,e))),l.push({key:g.key,value:c.y()(j,e),color:n(g,g.seriesIndex),data:g.values[e]}))});var m=g.tickFormat()(c.x()(d,e));j.tooltip.valueFormatter(function(a,b){return h.tickFormat()(a)}).data({value:m,index:e,series:l})(),j.renderGuideLine(i)}),j.dispatch.on("elementMouseout",function(a){y.tooltipHide(),f.clearHighlights()}),i.dispatch.on("legendClick",function(a,d){a.disabled=!a.disabled,k.filter(function(a){return!a.disabled}).length||k.map(function(a){return a.disabled=!1,E.selectAll(".nv-series").classed("disabled",!1),a}),v.disabled=k.map(function(a){return!!a.disabled}),y.stateChange(v),b.transition().call(c)}),i.dispatch.on("legendDblclick",function(a){k.forEach(function(a){a.disabled=!0}),a.disabled=!1,v.disabled=k.map(function(a){return!!a.disabled}),y.stateChange(v),c.update()}),y.on("changeState",function(a){"undefined"!=typeof a.disabled&&(k.forEach(function(b,c){b.disabled=a.disabled[c]}),v.disabled=a.disabled),c.update()})}),A.renderEnd("historicalBarChart immediate"),c}var d,e,f=b||a.models.historicalBar(),g=a.models.axis(),h=a.models.axis(),i=a.models.legend(),j=a.interactiveGuideline(),k=a.models.tooltip(),l={top:30,right:90,bottom:50,left:90},m=null,n=a.utils.defaultColor(),o=null,p=null,q=!1,r=!0,s=!0,t=!1,u=!1,v={},w=null,x=null,y=d3.dispatch("tooltipHide","stateChange","changeState","renderEnd"),z=250;g.orient("bottom").tickPadding(7),h.orient(t?"right":"left"),k.duration(0).headerEnabled(!1).valueFormatter(function(a,b){return h.tickFormat()(a,b)}).headerFormatter(function(a,b){return g.tickFormat()(a,b)});var A=a.utils.renderWatch(y,0);return f.dispatch.on("elementMouseover.tooltip",function(a){a.series={key:c.x()(a.data),value:c.y()(a.data),color:a.color},k.data(a).hidden(!1)}),f.dispatch.on("elementMouseout.tooltip",function(a){k.hidden(!0)}),f.dispatch.on("elementMousemove.tooltip",function(a){k()}),c.dispatch=y,c.bars=f,c.legend=i,c.xAxis=g,c.yAxis=h,c.interactiveLayer=j,c.tooltip=k,c.options=a.utils.optionsFunc.bind(c),c._options=Object.create({},{width:{get:function(){return o},set:function(a){o=a}},height:{get:function(){return p},set:function(a){p=a}},showLegend:{get:function(){return q},set:function(a){q=a}},showXAxis:{get:function(){return r},set:function(a){r=a}},showYAxis:{get:function(){return s},set:function(a){s=a}},defaultState:{get:function(){return w},set:function(a){w=a}},noData:{get:function(){return x},set:function(a){x=a}},margin:{get:function(){return l},set:function(a){void 0!==a.top&&(l.top=a.top,m=a.top),l.right=void 0!==a.right?a.right:l.right,l.bottom=void 0!==a.bottom?a.bottom:l.bottom,l.left=void 0!==a.left?a.left:l.left}},color:{get:function(){return n},set:function(b){n=a.utils.getColor(b),i.color(n),f.color(n)}},duration:{get:function(){return z},set:function(a){z=a,A.reset(z),h.duration(z),g.duration(z)}},rightAlignYAxis:{get:function(){return t},set:function(a){t=a,h.orient(a?"right":"left")}},useInteractiveGuideline:{get:function(){return u},set:function(a){u=a,a===!0&&c.interactive(!1)}}}),a.utils.inheritOptions(c,f),a.utils.initOptions(c),c},a.models.ohlcBarChart=function(){var b=a.models.historicalBarChart(a.models.ohlcBar());return b.useInteractiveGuideline(!0),b.interactiveLayer.tooltip.contentGenerator(function(a){var c=a.series[0].data,d=c.open<c.close?"2ca02c":"d62728";return'<h3 style="color: 
#'+d+'">'+a.value+"</h3><table><tr><td>open:</td><td>"+b.yAxis.tickFormat()(c.open)+"</td></tr><tr><td>close:</td><td>"+b.yAxis.tickFormat()(c.close)+"</td></tr><tr><td>high</td><td>"+b.yAxis.tickFormat()(c.high)+"</td></tr><tr><td>low:</td><td>"+b.yAxis.tickFormat()(c.low)+"</td></tr></table>"}),b},a.models.candlestickBarChart=function(){var b=a.models.historicalBarChart(a.models.candlestickBar());return b.useInteractiveGuideline(!0),b.interactiveLayer.tooltip.contentGenerator(function(a){var c=a.series[0].data,d=c.open<c.close?"2ca02c":"d62728";return'<h3 style="color: #'+d+'">'+a.value+"</h3><table><tr><td>open:</td><td>"+b.yAxis.tickFormat()(c.open)+"</td></tr><tr><td>close:</td><td>"+b.yAxis.tickFormat()(c.close)+"</td></tr><tr><td>high</td><td>"+b.yAxis.tickFormat()(c.high)+"</td></tr><tr><td>low:</td><td>"+b.yAxis.tickFormat()(c.low)+"</td></tr></table>";
+}),b},a.models.legend=function(){"use strict";function b(r){function s(a,b){return"furious"!=q?"#000":o?a.disengaged?"#000":"#fff":o?void 0:(a.color||(a.color=h(a,b)),a.disabled?a.color:"#fff")}function t(a,b){return o&&"furious"==q&&a.disengaged?"#eee":a.color||h(a,b)}function u(a,b){return o&&"furious"==q?1:a.disabled?0:1}return r.each(function(b){var h=d-c.left-c.right,r=d3.select(this);a.utils.initSVG(r);var v=r.selectAll("g.nv-legend").data([b]),w=v.enter().append("g").attr("class","nvd3 nv-legend").append("g"),x=v.select("g");l?v.attr("transform","translate("+-c.right+","+c.top+")"):v.attr("transform","translate("+c.left+","+c.top+")");var y,z,A=x.selectAll(".nv-series").data(function(a){return"furious"!=q?a:a.filter(function(a){return o?!0:!a.disengaged})}),B=A.enter().append("g").attr("class","nv-series");switch(q){case"furious":z=23;break;case"classic":z=20}if("classic"==q)B.append("circle").style("stroke-width",2).attr("class","nv-legend-symbol").attr("r",5),y=A.select(".nv-legend-symbol");else if("furious"==q){B.append("rect").style("stroke-width",2).attr("class","nv-legend-symbol").attr("rx",3).attr("ry",3),y=A.select(".nv-legend-symbol"),B.append("g").attr("class","nv-check-box").property("innerHTML",'<path d="M0.5,5 L22.5,5 L22.5,26.5 L0.5,26.5 L0.5,5 Z" class="nv-box"></path><path d="M5.5,12.8618467 L11.9185089,19.2803556 L31,0.198864511" class="nv-check"></path>').attr("transform","translate(-10,-8)scale(0.5)");var C=A.select(".nv-check-box");C.each(function(a,b){d3.select(this).selectAll("path").attr("stroke",s(a,b))})}B.append("text").attr("text-anchor","start").attr("class","nv-legend-text").attr("dy",".32em").attr("dx","8");var D=A.select("text.nv-legend-text");A.on("mouseover",function(a,b){p.legendMouseover(a,b)}).on("mouseout",function(a,b){p.legendMouseout(a,b)}).on("click",function(a,b){p.legendClick(a,b);var c=A.data();if(m){if("classic"==q)n?(c.forEach(function(a){a.disabled=!0}),a.disabled=!1):(a.disabled=!a.disabled,c.every(function(a){return a.disabled})&&c.forEach(function(a){a.disabled=!1}));else if("furious"==q)if(o)a.disengaged=!a.disengaged,a.userDisabled=void 0==a.userDisabled?!!a.disabled:a.userDisabled,a.disabled=a.disengaged||a.userDisabled;else if(!o){a.disabled=!a.disabled,a.userDisabled=a.disabled;var d=c.filter(function(a){return!a.disengaged});d.every(function(a){return a.userDisabled})&&c.forEach(function(a){a.disabled=a.userDisabled=!1})}p.stateChange({disabled:c.map(function(a){return!!a.disabled}),disengaged:c.map(function(a){return!!a.disengaged})})}}).on("dblclick",function(a,b){if(("furious"!=q||!o)&&(p.legendDblclick(a,b),m)){var c=A.data();c.forEach(function(a){a.disabled=!0,"furious"==q&&(a.userDisabled=a.disabled)}),a.disabled=!1,"furious"==q&&(a.userDisabled=a.disabled),p.stateChange({disabled:c.map(function(a){return!!a.disabled})})}}),A.classed("nv-disabled",function(a){return a.userDisabled}),A.exit().remove(),D.attr("fill",s).text(function(a){return g(f(a))});var E=0;if(j){var F=[];A.each(function(b,c){var d;if(g(f(b))&&g(f(b)).length>i){var e=g(f(b)).substring(0,i);d=d3.select(this).select("text").text(e+"..."),d3.select(this).append("svg:title").text(g(f(b)))}else d=d3.select(this).select("text");var h;try{if(h=d.node().getComputedTextLength(),0>=h)throw Error()}catch(j){h=a.utils.calcApproxTextWidth(d)}F.push(h+k)});var G=0,H=[];for(E=0;h>E&&G<F.length;)H[G]=F[G],E+=F[G++];for(0===G&&(G=1);E>h&&G>1;){H=[],G--;for(var I=0;I<F.length;I++)F[I]>(H[I%G]||0)&&(H[I%G]=F[I]);E=H.reduce(function(a,b,c,d){return a+b})}for(var 
J=[],K=0,L=0;G>K;K++)J[K]=L,L+=H[K];A.attr("transform",function(a,b){return"translate("+J[b%G]+","+(5+Math.floor(b/G)*z)+")"}),l?x.attr("transform","translate("+(d-c.right-E)+","+c.top+")"):x.attr("transform","translate(0,"+c.top+")"),e=c.top+c.bottom+Math.ceil(F.length/G)*z}else{var M,N=5,O=5,P=0;A.attr("transform",function(a,b){var e=d3.select(this).select("text").node().getComputedTextLength()+k;return M=O,d<c.left+c.right+M+e&&(O=M=5,N+=z),O+=e,O>P&&(P=O),M+P>E&&(E=M+P),"translate("+M+","+N+")"}),x.attr("transform","translate("+(d-c.right-P)+","+c.top+")"),e=c.top+c.bottom+N+15}if("furious"==q){y.attr("width",function(a,b){return D[0][b].getComputedTextLength()+27}).attr("height",18).attr("y",-9).attr("x",-15),w.insert("rect",":first-child").attr("class","nv-legend-bg").attr("fill","#eee").attr("opacity",0);var Q=x.select(".nv-legend-bg");Q.transition().duration(300).attr("x",-z).attr("width",E+z-12).attr("height",e+10).attr("y",-c.top-10).attr("opacity",o?1:0)}y.style("fill",t).style("fill-opacity",u).style("stroke",t)}),b}var c={top:5,right:0,bottom:5,left:0},d=400,e=20,f=function(a){return a.key},g=function(a){return a},h=a.utils.getColor(),i=20,j=!0,k=32,l=!0,m=!0,n=!1,o=!1,p=d3.dispatch("legendClick","legendDblclick","legendMouseover","legendMouseout","stateChange"),q="classic";return b.dispatch=p,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return d},set:function(a){d=a}},height:{get:function(){return e},set:function(a){e=a}},key:{get:function(){return f},set:function(a){f=a}},keyFormatter:{get:function(){return g},set:function(a){g=a}},align:{get:function(){return j},set:function(a){j=a}},maxKeyLength:{get:function(){return i},set:function(a){i=a}},rightAlign:{get:function(){return l},set:function(a){l=a}},padding:{get:function(){return k},set:function(a){k=a}},updateState:{get:function(){return m},set:function(a){m=a}},radioButtonMode:{get:function(){return n},set:function(a){n=a}},expanded:{get:function(){return o},set:function(a){o=a}},vers:{get:function(){return q},set:function(a){q=a}},margin:{get:function(){return c},set:function(a){c.top=void 0!==a.top?a.top:c.top,c.right=void 0!==a.right?a.right:c.right,c.bottom=void 0!==a.bottom?a.bottom:c.bottom,c.left=void 0!==a.left?a.left:c.left}},color:{get:function(){return h},set:function(b){h=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.line=function(){"use strict";function b(r){return v.reset(),v.models(e),r.each(function(b){i=d3.select(this);var r=a.utils.availableWidth(g,i,f),s=a.utils.availableHeight(h,i,f);a.utils.initSVG(i),c=e.xScale(),d=e.yScale(),t=t||c,u=u||d;var w=i.selectAll("g.nv-wrap.nv-line").data([b]),x=w.enter().append("g").attr("class","nvd3 nv-wrap nv-line"),y=x.append("defs"),z=x.append("g"),A=w.select("g");z.append("g").attr("class","nv-groups"),z.append("g").attr("class","nv-scatterWrap"),w.attr("transform","translate("+f.left+","+f.top+")"),e.width(r).height(s);var B=w.select(".nv-scatterWrap");B.call(e),y.append("clipPath").attr("id","nv-edge-clip-"+e.id()).append("rect"),w.select("#nv-edge-clip-"+e.id()+" rect").attr("width",r).attr("height",s>0?s:0),A.attr("clip-path",p?"url(#nv-edge-clip-"+e.id()+")":""),B.attr("clip-path",p?"url(#nv-edge-clip-"+e.id()+")":"");var C=w.select(".nv-groups").selectAll(".nv-group").data(function(a){return a},function(a){return a.key});C.enter().append("g").style("stroke-opacity",1e-6).style("stroke-width",function(a){return 
a.strokeWidth||j}).style("fill-opacity",1e-6),C.exit().remove(),C.attr("class",function(a,b){return(a.classed||"")+" nv-group nv-series-"+b}).classed("hover",function(a){return a.hover}).style("fill",function(a,b){return k(a,b)}).style("stroke",function(a,b){return k(a,b)}),C.watchTransition(v,"line: groups").style("stroke-opacity",1).style("fill-opacity",function(a){return a.fillOpacity||.5});var D=C.selectAll("path.nv-area").data(function(a){return o(a)?[a]:[]});D.enter().append("path").attr("class","nv-area").attr("d",function(b){return d3.svg.area().interpolate(q).defined(n).x(function(b,c){return a.utils.NaNtoZero(t(l(b,c)))}).y0(function(b,c){return a.utils.NaNtoZero(u(m(b,c)))}).y1(function(a,b){return u(d.domain()[0]<=0?d.domain()[1]>=0?0:d.domain()[1]:d.domain()[0])}).apply(this,[b.values])}),C.exit().selectAll("path.nv-area").remove(),D.watchTransition(v,"line: areaPaths").attr("d",function(b){return d3.svg.area().interpolate(q).defined(n).x(function(b,d){return a.utils.NaNtoZero(c(l(b,d)))}).y0(function(b,c){return a.utils.NaNtoZero(d(m(b,c)))}).y1(function(a,b){return d(d.domain()[0]<=0?d.domain()[1]>=0?0:d.domain()[1]:d.domain()[0])}).apply(this,[b.values])});var E=C.selectAll("path.nv-line").data(function(a){return[a.values]});E.enter().append("path").attr("class","nv-line").attr("d",d3.svg.line().interpolate(q).defined(n).x(function(b,c){return a.utils.NaNtoZero(t(l(b,c)))}).y(function(b,c){return a.utils.NaNtoZero(u(m(b,c)))})),E.watchTransition(v,"line: linePaths").attr("d",d3.svg.line().interpolate(q).defined(n).x(function(b,d){return a.utils.NaNtoZero(c(l(b,d)))}).y(function(b,c){return a.utils.NaNtoZero(d(m(b,c)))})),t=c.copy(),u=d.copy()}),v.renderEnd("line immediate"),b}var c,d,e=a.models.scatter(),f={top:0,right:0,bottom:0,left:0},g=960,h=500,i=null,j=1.5,k=a.utils.defaultColor(),l=function(a){return a.x},m=function(a){return a.y},n=function(a,b){return!isNaN(m(a,b))&&null!==m(a,b)},o=function(a){return a.area},p=!1,q="linear",r=250,s=d3.dispatch("elementClick","elementMouseover","elementMouseout","renderEnd");e.pointSize(16).pointDomain([16,256]);var t,u,v=a.utils.renderWatch(s,r);return b.dispatch=s,b.scatter=e,e.dispatch.on("elementClick",function(){s.elementClick.apply(this,arguments)}),e.dispatch.on("elementMouseover",function(){s.elementMouseover.apply(this,arguments)}),e.dispatch.on("elementMouseout",function(){s.elementMouseout.apply(this,arguments)}),b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return g},set:function(a){g=a}},height:{get:function(){return h},set:function(a){h=a}},defined:{get:function(){return n},set:function(a){n=a}},interpolate:{get:function(){return q},set:function(a){q=a}},clipEdge:{get:function(){return p},set:function(a){p=a}},margin:{get:function(){return f},set:function(a){f.top=void 0!==a.top?a.top:f.top,f.right=void 0!==a.right?a.right:f.right,f.bottom=void 0!==a.bottom?a.bottom:f.bottom,f.left=void 0!==a.left?a.left:f.left}},duration:{get:function(){return r},set:function(a){r=a,v.reset(r),e.duration(r)}},isArea:{get:function(){return o},set:function(a){o=d3.functor(a)}},x:{get:function(){return l},set:function(a){l=a,e.x(a)}},y:{get:function(){return m},set:function(a){m=a,e.y(a)}},color:{get:function(){return k},set:function(b){k=a.utils.getColor(b),e.color(k)}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.lineChart=function(){"use strict";function b(j){return C.reset(),C.models(e),s&&C.models(f),t&&C.models(g),j.each(function(j){function z(){s&&M.select(".nv-focus 
.nv-x.nv-axis").transition().duration(B).call(f)}function C(){t&&M.select(".nv-focus .nv-y.nv-axis").transition().duration(B).call(g)}function F(a){var b=M.select(".nv-focus .nv-linesWrap").datum(j.filter(function(a){return!a.disabled}).map(function(b,c){return{key:b.key,area:b.area,classed:b.classed,values:b.values.filter(function(b,c){return e.x()(b,c)>=a[0]&&e.x()(b,c)<=a[1]}),disableTooltip:b.disableTooltip}}));b.transition().duration(B).call(e),z(),C()}var G=d3.select(this);a.utils.initSVG(G);var H=a.utils.availableWidth(o,G,l),I=a.utils.availableHeight(p,G,l)-(w?k.height():0);if(b.update=function(){0===B?G.call(b):G.transition().duration(B).call(b)},b.container=this,x.setter(E(j),b.update).getter(D(j)).update(),x.disabled=j.map(function(a){return!!a.disabled}),!y){var J;y={};for(J in x)x[J]instanceof Array?y[J]=x[J].slice(0):y[J]=x[J]}if(!(j&&j.length&&j.filter(function(a){return a.values.length}).length))return a.utils.noData(b,G),b;G.selectAll(".nv-noData").remove(),k.dispatch.on("onBrush",function(a){F(a)}),c=e.xScale(),d=e.yScale();var K=G.selectAll("g.nv-wrap.nv-lineChart").data([j]),L=K.enter().append("g").attr("class","nvd3 nv-wrap nv-lineChart").append("g"),M=K.select("g");L.append("g").attr("class","nv-legendWrap");var N=L.append("g").attr("class","nv-focus");N.append("g").attr("class","nv-background").append("rect"),N.append("g").attr("class","nv-x nv-axis"),N.append("g").attr("class","nv-y nv-axis"),N.append("g").attr("class","nv-linesWrap"),N.append("g").attr("class","nv-interactive");L.append("g").attr("class","nv-focusWrap");q?(h.width(H),M.select(".nv-legendWrap").datum(j).call(h),"bottom"===r?K.select(".nv-legendWrap").attr("transform","translate(0,"+I+")"):"top"===r&&(m||h.height()===l.top||(l.top=h.height(),I=a.utils.availableHeight(p,G,l)-(w?k.height():0)),K.select(".nv-legendWrap").attr("transform","translate(0,"+-l.top+")"))):M.select(".nv-legendWrap").selectAll("*").remove(),K.attr("transform","translate("+l.left+","+l.top+")"),u&&M.select(".nv-y.nv-axis").attr("transform","translate("+H+",0)"),v&&(i.width(H).height(I).margin({left:l.left,top:l.top}).svgContainer(G).xScale(c),K.select(".nv-interactive").call(i)),M.select(".nv-focus .nv-background rect").attr("width",H).attr("height",I),e.width(H).height(I).color(j.map(function(a,b){return a.color||n(a,b)}).filter(function(a,b){return!j[b].disabled}));var O=M.select(".nv-linesWrap").datum(j.filter(function(a){return!a.disabled}));if(s&&f.scale(c)._ticks(a.utils.calcTicksX(H/100,j)).tickSize(-I,0),t&&g.scale(d)._ticks(a.utils.calcTicksY(I/36,j)).tickSize(-H,0),M.select(".nv-focus .nv-x.nv-axis").attr("transform","translate(0,"+I+")"),w){k.width(H),M.select(".nv-focusWrap").attr("transform","translate(0,"+(I+l.bottom+k.margin().top)+")").datum(j.filter(function(a){return!a.disabled})).call(k);var P=k.brush.empty()?k.xDomain():k.brush.extent();null!==P&&F(P)}else O.call(e),z(),C();h.dispatch.on("stateChange",function(a){for(var c in a)x[c]=a[c];A.stateChange(x),b.update()}),i.dispatch.on("elementMousemove",function(d){e.clearHighlights();var f,h,l,m=[];if(j.filter(function(a,b){return a.seriesIndex=b,!a.disabled&&!a.disableTooltip}).forEach(function(g,i){var j=w?k.brush.empty()?k.xScale().domain():k.brush.extent():c.domain(),o=g.values.filter(function(a,b){return j[0]<=j[1]?e.x()(a,b)>=j[0]&&e.x()(a,b)<=j[1]:e.x()(a,b)>=j[1]&&e.x()(a,b)<=j[0]});h=a.interactiveBisect(o,d.pointXValue,e.x());var p=o[h],q=b.y()(p,h);null!==q&&e.highlightPoint(i,h,!0),void 0!==p&&(void 0===f&&(f=p),void 
0===l&&(l=b.xScale()(b.x()(p,h))),m.push({key:g.key,value:q,color:n(g,g.seriesIndex),data:p}))}),m.length>2){var o=b.yScale().invert(d.mouseY),p=Math.abs(b.yScale().domain()[0]-b.yScale().domain()[1]),q=.03*p,r=a.nearestValueIndex(m.map(function(a){return a.value}),o,q);null!==r&&(m[r].highlight=!0)}var s=function(a,b){return null==a?"N/A":g.tickFormat()(a)};i.tooltip.valueFormatter(i.tooltip.valueFormatter()||s).data({value:b.x()(f,h),index:h,series:m})(),i.renderGuideLine(l)}),i.dispatch.on("elementClick",function(c){var d,f=[];j.filter(function(a,b){return a.seriesIndex=b,!a.disabled}).forEach(function(e){var g=a.interactiveBisect(e.values,c.pointXValue,b.x()),h=e.values[g];if("undefined"!=typeof h){"undefined"==typeof d&&(d=b.xScale()(b.x()(h,g)));var i=b.yScale()(b.y()(h,g));f.push({point:h,pointIndex:g,pos:[d,i],seriesIndex:e.seriesIndex,series:e})}}),e.dispatch.elementClick(f)}),i.dispatch.on("elementMouseout",function(a){e.clearHighlights()}),A.on("changeState",function(a){"undefined"!=typeof a.disabled&&j.length===a.disabled.length&&(j.forEach(function(b,c){b.disabled=a.disabled[c]}),x.disabled=a.disabled),b.update()})}),C.renderEnd("lineChart immediate"),b}var c,d,e=a.models.line(),f=a.models.axis(),g=a.models.axis(),h=a.models.legend(),i=a.interactiveGuideline(),j=a.models.tooltip(),k=a.models.focus(a.models.line()),l={top:30,right:20,bottom:50,left:60},m=null,n=a.utils.defaultColor(),o=null,p=null,q=!0,r="top",s=!0,t=!0,u=!1,v=!1,w=!1,x=a.utils.state(),y=null,z=null,A=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState","renderEnd"),B=250;f.orient("bottom").tickPadding(7),g.orient(u?"right":"left"),e.clipEdge(!0).duration(0),j.valueFormatter(function(a,b){return g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),i.tooltip.valueFormatter(function(a,b){return g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)});var C=a.utils.renderWatch(A,B),D=function(a){return function(){return{active:a.map(function(a){return!a.disabled})}}},E=function(a){return function(b){void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};return e.dispatch.on("elementMouseover.tooltip",function(a){a.series.disableTooltip||j.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){j.hidden(!0)}),b.dispatch=A,b.lines=e,b.legend=h,b.focus=k,b.xAxis=f,b.x2Axis=k.xAxis,b.yAxis=g,b.y2Axis=k.yAxis,b.interactiveLayer=i,b.tooltip=j,b.state=x,b.dispatch=A,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return o},set:function(a){o=a}},height:{get:function(){return p},set:function(a){p=a}},showLegend:{get:function(){return q},set:function(a){q=a}},legendPosition:{get:function(){return r},set:function(a){r=a}},showXAxis:{get:function(){return s},set:function(a){s=a}},showYAxis:{get:function(){return t},set:function(a){t=a}},defaultState:{get:function(){return y},set:function(a){y=a}},noData:{get:function(){return z},set:function(a){z=a}},focusEnable:{get:function(){return w},set:function(a){w=a}},focusHeight:{get:function(){return k.height()},set:function(a){k.height(a)}},focusShowAxisX:{get:function(){return k.showXAxis()},set:function(a){k.showXAxis(a)}},focusShowAxisY:{get:function(){return k.showYAxis()},set:function(a){k.showYAxis(a)}},brushExtent:{get:function(){return k.brushExtent()},set:function(a){k.brushExtent(a)}},focusMargin:{get:function(){return k.margin},set:function(a){void 0!==a.top&&(l.top=a.top,m=a.top),k.margin.right=void 
0!==a.right?a.right:k.margin.right,k.margin.bottom=void 0!==a.bottom?a.bottom:k.margin.bottom,k.margin.left=void 0!==a.left?a.left:k.margin.left}},margin:{get:function(){return l},set:function(a){l.top=void 0!==a.top?a.top:l.top,l.right=void 0!==a.right?a.right:l.right,l.bottom=void 0!==a.bottom?a.bottom:l.bottom,l.left=void 0!==a.left?a.left:l.left}},duration:{get:function(){return B},set:function(a){B=a,C.reset(B),e.duration(B),k.duration(B),f.duration(B),g.duration(B)}},color:{get:function(){return n},set:function(b){n=a.utils.getColor(b),h.color(n),e.color(n),k.color(n)}},interpolate:{get:function(){return e.interpolate()},set:function(a){e.interpolate(a),k.interpolate(a)}},xTickFormat:{get:function(){return f.tickFormat()},set:function(a){f.tickFormat(a),k.xTickFormat(a)}},yTickFormat:{get:function(){return g.tickFormat()},set:function(a){g.tickFormat(a),k.yTickFormat(a)}},x:{get:function(){return e.x()},set:function(a){e.x(a),k.x(a)}},y:{get:function(){return e.y()},set:function(a){e.y(a),k.y(a)}},rightAlignYAxis:{get:function(){return u},set:function(a){u=a,g.orient(u?"right":"left")}},useInteractiveGuideline:{get:function(){return v},set:function(a){v=a,v&&(e.interactive(!1),e.useVoronoi(!1))}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.lineWithFocusChart=function(){return a.models.lineChart().margin({bottom:30}).focusEnable(!0)},a.models.linePlusBarChart=function(){"use strict";function b(v){return v.each(function(v){function K(a){var b=+("e"==a),c=b?1:-1,d=$/3;return"M"+.5*c+","+d+"A6,6 0 0 "+b+" "+6.5*c+","+(d+6)+"V"+(2*d-6)+"A6,6 0 0 "+b+" "+.5*c+","+2*d+"ZM"+2.5*c+","+(d+8)+"V"+(2*d-8)+"M"+4.5*c+","+(d+8)+"V"+(2*d-8)}function S(){u.empty()||u.extent(J),na.data([u.empty()?e.domain():J]).each(function(a,b){var c=e(a[0])-e.range()[0],d=e.range()[1]-e(a[1]);d3.select(this).select(".left").attr("width",0>c?0:c),d3.select(this).select(".right").attr("x",e(a[1])).attr("width",0>d?0:d)})}function T(){J=u.empty()?null:u.extent(),c=u.empty()?e.domain():u.extent(),L.brush({extent:c,brush:u}),S(),l.width(Y).height(Z).color(v.map(function(a,b){return a.color||D(a,b)}).filter(function(a,b){return!v[b].disabled&&v[b].bar})),j.width(Y).height(Z).color(v.map(function(a,b){return a.color||D(a,b)}).filter(function(a,b){return!v[b].disabled&&!v[b].bar}));var b=ga.select(".nv-focus .nv-barsWrap").datum(aa.length?aa.map(function(a,b){return{key:a.key,values:a.values.filter(function(a,b){return l.x()(a,b)>=c[0]&&l.x()(a,b)<=c[1]})}}):[{values:[]}]),h=ga.select(".nv-focus .nv-linesWrap").datum(W(ba)?[{values:[]}]:ba.filter(function(a){return!a.disabled}).map(function(a,b){return{area:a.area,fillOpacity:a.fillOpacity,strokeWidth:a.strokeWidth,key:a.key,values:a.values.filter(function(a,b){return j.x()(a,b)>=c[0]&&j.x()(a,b)<=c[1]})}}));d=aa.length&&!R?l.xScale():j.xScale(),n.scale(d)._ticks(a.utils.calcTicksX(Y/100,v)).tickSize(-Z,0),n.domain([Math.ceil(c[0]),Math.floor(c[1])]),ga.select(".nv-x.nv-axis").transition().duration(M).call(n),b.transition().duration(M).call(l),h.transition().duration(M).call(j),ga.select(".nv-focus .nv-x.nv-axis").attr("transform","translate(0,"+f.range()[0]+")"),p.scale(f)._ticks(a.utils.calcTicksY(Z/36,v)).tickSize(-Y,0),q.scale(g)._ticks(a.utils.calcTicksY(Z/36,v)),R?q.tickSize(ba.length?0:-Y,0):q.tickSize(aa.length?0:-Y,0);var i=aa.length?1:0,k=ba.length&&!W(ba)?1:0,m=R?k:i,o=R?i:k;ga.select(".nv-focus .nv-y1.nv-axis").style("opacity",m),ga.select(".nv-focus 
.nv-y2.nv-axis").style("opacity",o).attr("transform","translate("+d.range()[1]+",0)"),ga.select(".nv-focus .nv-y1.nv-axis").transition().duration(M).call(p),ga.select(".nv-focus .nv-y2.nv-axis").transition().duration(M).call(q)}var X=d3.select(this);a.utils.initSVG(X);var Y=a.utils.availableWidth(z,X,w),Z=a.utils.availableHeight(A,X,w)-(F?I:0),$=I-y.top-y.bottom;if(b.update=function(){X.transition().duration(M).call(b)},b.container=this,N.setter(V(v),b.update).getter(U(v)).update(),N.disabled=v.map(function(a){return!!a.disabled}),!O){var _;O={};for(_ in N)N[_]instanceof Array?O[_]=N[_].slice(0):O[_]=N[_]}if(!(v&&v.length&&v.filter(function(a){return a.values.length}).length))return a.utils.noData(b,X),b;X.selectAll(".nv-noData").remove();var aa=v.filter(function(a){return!a.disabled&&a.bar}),ba=v.filter(function(a){return!a.bar});d=aa.length&&!R?l.xScale():j.xScale(),e=o.scale(),f=R?j.yScale():l.yScale(),g=R?l.yScale():j.yScale(),h=R?k.yScale():m.yScale(),i=R?m.yScale():k.yScale();var ca=v.filter(function(a){return!a.disabled&&(R?!a.bar:a.bar)}).map(function(a){return a.values.map(function(a,b){return{x:B(a,b),y:C(a,b)}})}),da=v.filter(function(a){return!a.disabled&&(R?a.bar:!a.bar)}).map(function(a){return a.values.map(function(a,b){return{x:B(a,b),y:C(a,b)}})});d.range([0,Y]),e.domain(d3.extent(d3.merge(ca.concat(da)),function(a){return a.x})).range([0,Y]);var ea=X.selectAll("g.nv-wrap.nv-linePlusBar").data([v]),fa=ea.enter().append("g").attr("class","nvd3 nv-wrap nv-linePlusBar").append("g"),ga=ea.select("g");fa.append("g").attr("class","nv-legendWrap");var ha=fa.append("g").attr("class","nv-focus");ha.append("g").attr("class","nv-x nv-axis"),ha.append("g").attr("class","nv-y1 nv-axis"),ha.append("g").attr("class","nv-y2 nv-axis"),ha.append("g").attr("class","nv-barsWrap"),ha.append("g").attr("class","nv-linesWrap");var ia=fa.append("g").attr("class","nv-context");if(ia.append("g").attr("class","nv-x nv-axis"),ia.append("g").attr("class","nv-y1 nv-axis"),ia.append("g").attr("class","nv-y2 nv-axis"),ia.append("g").attr("class","nv-barsWrap"),ia.append("g").attr("class","nv-linesWrap"),ia.append("g").attr("class","nv-brushBackground"),ia.append("g").attr("class","nv-x nv-brush"),E){var ja=t.align()?Y/2:Y,ka=t.align()?ja:0;t.width(ja),ga.select(".nv-legendWrap").datum(v.map(function(a){return a.originalKey=void 0===a.originalKey?a.key:a.originalKey,R?a.key=a.originalKey+(a.bar?Q:P):a.key=a.originalKey+(a.bar?P:Q),a})).call(t),x||t.height()===w.top||(w.top=t.height(),Z=a.utils.availableHeight(A,X,w)-I),ga.select(".nv-legendWrap").attr("transform","translate("+ka+","+-w.top+")")}else ga.select(".nv-legendWrap").selectAll("*").remove();ea.attr("transform","translate("+w.left+","+w.top+")"),ga.select(".nv-context").style("display",F?"initial":"none"),m.width(Y).height($).color(v.map(function(a,b){return a.color||D(a,b)}).filter(function(a,b){return!v[b].disabled&&v[b].bar})),k.width(Y).height($).color(v.map(function(a,b){return a.color||D(a,b)}).filter(function(a,b){return!v[b].disabled&&!v[b].bar}));var la=ga.select(".nv-context .nv-barsWrap").datum(aa.length?aa:[{values:[]}]),ma=ga.select(".nv-context .nv-linesWrap").datum(W(ba)?[{values:[]}]:ba.filter(function(a){return!a.disabled}));ga.select(".nv-context").attr("transform","translate(0,"+(Z+w.bottom+y.top)+")"),la.transition().call(m),ma.transition().call(k),H&&(o._ticks(a.utils.calcTicksX(Y/100,v)).tickSize(-$,0),ga.select(".nv-context .nv-x.nv-axis").attr("transform","translate(0,"+h.range()[0]+")"),ga.select(".nv-context 
.nv-x.nv-axis").transition().call(o)),G&&(r.scale(h)._ticks($/36).tickSize(-Y,0),s.scale(i)._ticks($/36).tickSize(aa.length?0:-Y,0),ga.select(".nv-context .nv-y3.nv-axis").style("opacity",aa.length?1:0).attr("transform","translate(0,"+e.range()[0]+")"),ga.select(".nv-context .nv-y2.nv-axis").style("opacity",ba.length?1:0).attr("transform","translate("+e.range()[1]+",0)"),ga.select(".nv-context .nv-y1.nv-axis").transition().call(r),ga.select(".nv-context .nv-y2.nv-axis").transition().call(s)),u.x(e).on("brush",T),J&&u.extent(J);var na=ga.select(".nv-brushBackground").selectAll("g").data([J||u.extent()]),oa=na.enter().append("g");oa.append("rect").attr("class","left").attr("x",0).attr("y",0).attr("height",$),oa.append("rect").attr("class","right").attr("x",0).attr("y",0).attr("height",$);var pa=ga.select(".nv-x.nv-brush").call(u);pa.selectAll("rect").attr("height",$),pa.selectAll(".resize").append("path").attr("d",K),t.dispatch.on("stateChange",function(a){for(var c in a)N[c]=a[c];L.stateChange(N),b.update()}),L.on("changeState",function(a){"undefined"!=typeof a.disabled&&(v.forEach(function(b,c){b.disabled=a.disabled[c]}),N.disabled=a.disabled),b.update()}),T()}),b}var c,d,e,f,g,h,i,j=a.models.line(),k=a.models.line(),l=a.models.historicalBar(),m=a.models.historicalBar(),n=a.models.axis(),o=a.models.axis(),p=a.models.axis(),q=a.models.axis(),r=a.models.axis(),s=a.models.axis(),t=a.models.legend(),u=d3.svg.brush(),v=a.models.tooltip(),w={top:30,right:30,bottom:30,left:60},x=null,y={top:0,right:30,bottom:20,left:60},z=null,A=null,B=function(a){return a.x},C=function(a){return a.y},D=a.utils.defaultColor(),E=!0,F=!0,G=!1,H=!0,I=50,J=null,K=null,L=d3.dispatch("brush","stateChange","changeState"),M=0,N=a.utils.state(),O=null,P=" (left axis)",Q=" (right axis)",R=!1;j.clipEdge(!0),k.interactive(!1),k.pointActive(function(a){return!1}),n.orient("bottom").tickPadding(5),p.orient("left"),q.orient("right"),o.orient("bottom").tickPadding(5),r.orient("left"),s.orient("right"),v.headerEnabled(!0).headerFormatter(function(a,b){return n.tickFormat()(a,b)});var S=function(){return R?{main:q,focus:s}:{main:p,focus:r}},T=function(){return R?{main:p,focus:r}:{main:q,focus:s}},U=function(a){return function(){return{active:a.map(function(a){return!a.disabled})}}},V=function(a){return function(b){void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}},W=function(a){return a.every(function(a){return a.disabled})};return j.dispatch.on("elementMouseover.tooltip",function(a){v.duration(100).valueFormatter(function(a,b){return T().main.tickFormat()(a,b)}).data(a).hidden(!1)}),j.dispatch.on("elementMouseout.tooltip",function(a){v.hidden(!0)}),l.dispatch.on("elementMouseover.tooltip",function(a){a.value=b.x()(a.data),a.series={value:b.y()(a.data),color:a.color},v.duration(0).valueFormatter(function(a,b){return S().main.tickFormat()(a,b)}).data(a).hidden(!1)}),l.dispatch.on("elementMouseout.tooltip",function(a){v.hidden(!0)}),l.dispatch.on("elementMousemove.tooltip",function(a){v()}),b.dispatch=L,b.legend=t,b.lines=j,b.lines2=k,b.bars=l,b.bars2=m,b.xAxis=n,b.x2Axis=o,b.y1Axis=p,b.y2Axis=q,b.y3Axis=r,b.y4Axis=s,b.tooltip=v,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return z},set:function(a){z=a}},height:{get:function(){return A},set:function(a){A=a}},showLegend:{get:function(){return E},set:function(a){E=a}},brushExtent:{get:function(){return J},set:function(a){J=a}},noData:{get:function(){return K},set:function(a){K=a}},focusEnable:{get:function(){return 
F},set:function(a){F=a}},focusHeight:{get:function(){return I},set:function(a){I=a}},focusShowAxisX:{get:function(){return H},set:function(a){H=a}},focusShowAxisY:{get:function(){return G},set:function(a){G=a}},legendLeftAxisHint:{get:function(){return P},set:function(a){P=a}},legendRightAxisHint:{get:function(){return Q},set:function(a){Q=a}},margin:{get:function(){return w},set:function(a){void 0!==a.top&&(w.top=a.top,x=a.top),w.right=void 0!==a.right?a.right:w.right,w.bottom=void 0!==a.bottom?a.bottom:w.bottom,w.left=void 0!==a.left?a.left:w.left}},focusMargin:{get:function(){return y},set:function(a){y.top=void 0!==a.top?a.top:y.top,y.right=void 0!==a.right?a.right:y.right,y.bottom=void 0!==a.bottom?a.bottom:y.bottom,y.left=void 0!==a.left?a.left:y.left}},duration:{get:function(){return M},set:function(a){M=a}},color:{get:function(){return D},set:function(b){D=a.utils.getColor(b),t.color(D)}},x:{get:function(){return B},set:function(a){B=a,j.x(a),k.x(a),l.x(a),m.x(a)}},y:{get:function(){return C},set:function(a){C=a,j.y(a),k.y(a),l.y(a),m.y(a)}},switchYAxisOrder:{get:function(){return R},set:function(a){if(R!==a){var b=p;p=q,q=b;var c=r;r=s,s=c}R=a,p.orient("left"),q.orient("right"),r.orient("left"),s.orient("right")}}}),a.utils.inheritOptions(b,j),a.utils.initOptions(b),b},a.models.multiBar=function(){"use strict";function b(F){return D.reset(),F.each(function(b){var F=k-j.left-j.right,G=l-j.top-j.bottom;p=d3.select(this),a.utils.initSVG(p);var H=0;if(x&&b.length&&(x=[{values:b[0].values.map(function(a){return{x:a.x,y:0,series:a.series,size:.01}})}]),u){var I=d3.layout.stack().offset(v).values(function(a){return a.values}).y(r)(!b.length&&x?x:b);I.forEach(function(a,c){a.nonStackable?(b[c].nonStackableSeries=H++,I[c]=b[c]):c>0&&I[c-1].nonStackable&&I[c].values.map(function(a,b){a.y0-=I[c-1].values[b].y,a.y1=a.y0+a.y})}),b=I}b.forEach(function(a,b){a.values.forEach(function(c){c.series=b,c.key=a.key})}),u&&b.length>0&&b[0].values.map(function(a,c){var d=0,e=0;b.map(function(a,f){if(!b[f].nonStackable){var g=a.values[c];g.size=Math.abs(g.y),g.y<0?(g.y1=e,e-=g.size):(g.y1=g.size+d,d+=g.size)}})});var J=d&&e?[]:b.map(function(a,b){return a.values.map(function(a,c){return{x:q(a,c),y:r(a,c),y0:a.y0,y1:a.y1,idx:b}})});m.domain(d||d3.merge(J).map(function(a){return a.x})).rangeBands(f||[0,F],A),n.domain(e||d3.extent(d3.merge(J).map(function(a){var c=a.y;return u&&!b[a.idx].nonStackable&&(c=a.y>0?a.y1:a.y1+a.y),c}).concat(s))).range(g||[G,0]),m.domain()[0]===m.domain()[1]&&(m.domain()[0]?m.domain([m.domain()[0]-.01*m.domain()[0],m.domain()[1]+.01*m.domain()[1]]):m.domain([-1,1])),n.domain()[0]===n.domain()[1]&&(n.domain()[0]?n.domain([n.domain()[0]+.01*n.domain()[0],n.domain()[1]-.01*n.domain()[1]]):n.domain([-1,1])),h=h||m,i=i||n;var K=p.selectAll("g.nv-wrap.nv-multibar").data([b]),L=K.enter().append("g").attr("class","nvd3 nv-wrap nv-multibar"),M=L.append("defs"),N=L.append("g"),O=K.select("g");N.append("g").attr("class","nv-groups"),K.attr("transform","translate("+j.left+","+j.top+")"),M.append("clipPath").attr("id","nv-edge-clip-"+o).append("rect"),K.select("#nv-edge-clip-"+o+" rect").attr("width",F).attr("height",G),O.attr("clip-path",t?"url(#nv-edge-clip-"+o+")":"");var P=K.select(".nv-groups").selectAll(".nv-group").data(function(a){return a},function(a,b){return b});P.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6);var Q=D.transition(P.exit().selectAll("rect.nv-bar"),"multibarExit",Math.min(100,z)).attr("y",function(a,c,d){var e=i(0)||0;return 
u&&b[a.series]&&!b[a.series].nonStackable&&(e=i(a.y0)),e}).attr("height",0).remove();Q.delay&&Q.delay(function(a,b){var c=b*(z/(E+1))-b;return c}),P.attr("class",function(a,b){return"nv-group nv-series-"+b}).classed("hover",function(a){return a.hover}).style("fill",function(a,b){return w(a,b)}).style("stroke",function(a,b){
+return w(a,b)}),P.style("stroke-opacity",1).style("fill-opacity",B);var R=P.selectAll("rect.nv-bar").data(function(a){return x&&!b.length?x.values:a.values});R.exit().remove();R.enter().append("rect").attr("class",function(a,b){return r(a,b)<0?"nv-bar negative":"nv-bar positive"}).attr("x",function(a,c,d){return u&&!b[d].nonStackable?0:d*m.rangeBand()/b.length}).attr("y",function(a,c,d){return i(u&&!b[d].nonStackable?a.y0:0)||0}).attr("height",0).attr("width",function(a,c,d){return m.rangeBand()/(u&&!b[d].nonStackable?1:b.length)}).attr("transform",function(a,b){return"translate("+m(q(a,b))+",0)"});R.style("fill",function(a,b,c){return w(a,c,b)}).style("stroke",function(a,b,c){return w(a,c,b)}).on("mouseover",function(a,b){d3.select(this).classed("hover",!0),C.elementMouseover({data:a,index:b,color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1),C.elementMouseout({data:a,index:b,color:d3.select(this).style("fill")})}).on("mousemove",function(a,b){C.elementMousemove({data:a,index:b,color:d3.select(this).style("fill")})}).on("click",function(a,b){var c=this;C.elementClick({data:a,index:b,color:d3.select(this).style("fill"),event:d3.event,element:c}),d3.event.stopPropagation()}).on("dblclick",function(a,b){C.elementDblClick({data:a,index:b,color:d3.select(this).style("fill")}),d3.event.stopPropagation()}),R.attr("class",function(a,b){return r(a,b)<0?"nv-bar negative":"nv-bar positive"}).attr("transform",function(a,b){return"translate("+m(q(a,b))+",0)"}),y&&(c||(c=b.map(function(){return!0})),R.style("fill",function(a,b,d){return d3.rgb(y(a,b)).darker(c.map(function(a,b){return b}).filter(function(a,b){return!c[b]})[d]).toString()}).style("stroke",function(a,b,d){return d3.rgb(y(a,b)).darker(c.map(function(a,b){return b}).filter(function(a,b){return!c[b]})[d]).toString()}));var S=R.watchTransition(D,"multibar",Math.min(250,z)).delay(function(a,c){return c*z/b[0].values.length});u?S.attr("y",function(a,c,d){var e=0;return e=b[d].nonStackable?r(a,c)<0?n(0):n(0)-n(r(a,c))<-1?n(0)-1:n(r(a,c))||0:n(a.y1)}).attr("height",function(a,c,d){return b[d].nonStackable?Math.max(Math.abs(n(r(a,c))-n(0)),0)||0:Math.max(Math.abs(n(a.y+a.y0)-n(a.y0)),0)}).attr("x",function(a,c,d){var e=0;return b[d].nonStackable&&(e=a.series*m.rangeBand()/b.length,b.length!==H&&(e=b[d].nonStackableSeries*m.rangeBand()/(2*H))),e}).attr("width",function(a,c,d){if(b[d].nonStackable){var e=m.rangeBand()/H;return b.length!==H&&(e=m.rangeBand()/(2*H)),e}return m.rangeBand()}):S.attr("x",function(a,c){return a.series*m.rangeBand()/b.length}).attr("width",m.rangeBand()/b.length).attr("y",function(a,b){return r(a,b)<0?n(0):n(0)-n(r(a,b))<1?n(0)-1:n(r(a,b))||0}).attr("height",function(a,b){return Math.max(Math.abs(n(r(a,b))-n(0)),1)||0}),h=m.copy(),i=n.copy(),b[0]&&b[0].values&&(E=b[0].values.length)}),D.renderEnd("multibar immediate"),b}var c,d,e,f,g,h,i,j={top:0,right:0,bottom:0,left:0},k=960,l=500,m=d3.scale.ordinal(),n=d3.scale.linear(),o=Math.floor(1e4*Math.random()),p=null,q=function(a){return a.x},r=function(a){return a.y},s=[0],t=!0,u=!1,v="zero",w=a.utils.defaultColor(),x=!1,y=null,z=500,A=.1,B=.75,C=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove","renderEnd"),D=a.utils.renderWatch(C,z),E=0;return b.dispatch=C,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return k},set:function(a){k=a}},height:{get:function(){return l},set:function(a){l=a}},x:{get:function(){return 
q},set:function(a){q=a}},y:{get:function(){return r},set:function(a){r=a}},xScale:{get:function(){return m},set:function(a){m=a}},yScale:{get:function(){return n},set:function(a){n=a}},xDomain:{get:function(){return d},set:function(a){d=a}},yDomain:{get:function(){return e},set:function(a){e=a}},xRange:{get:function(){return f},set:function(a){f=a}},yRange:{get:function(){return g},set:function(a){g=a}},forceY:{get:function(){return s},set:function(a){s=a}},stacked:{get:function(){return u},set:function(a){u=a}},stackOffset:{get:function(){return v},set:function(a){v=a}},clipEdge:{get:function(){return t},set:function(a){t=a}},disabled:{get:function(){return c},set:function(a){c=a}},id:{get:function(){return o},set:function(a){o=a}},hideable:{get:function(){return x},set:function(a){x=a}},groupSpacing:{get:function(){return A},set:function(a){A=a}},fillOpacity:{get:function(){return B},set:function(a){B=a}},margin:{get:function(){return j},set:function(a){j.top=void 0!==a.top?a.top:j.top,j.right=void 0!==a.right?a.right:j.right,j.bottom=void 0!==a.bottom?a.bottom:j.bottom,j.left=void 0!==a.left?a.left:j.left}},duration:{get:function(){return z},set:function(a){z=a,D.reset(z)}},color:{get:function(){return w},set:function(b){w=a.utils.getColor(b)}},barColor:{get:function(){return y},set:function(b){y=b?a.utils.getColor(b):null}}}),a.utils.initOptions(b),b},a.models.multiBarChart=function(){"use strict";function b(C){return H.reset(),H.models(e),t&&H.models(f),u&&H.models(g),C.each(function(C){var H=d3.select(this);a.utils.initSVG(H);var L=a.utils.availableWidth(n,H,l),M=a.utils.availableHeight(o,H,l);if(b.update=function(){0===F?H.call(b):H.transition().duration(F).call(b)},b.container=this,A.setter(K(C),b.update).getter(J(C)).update(),A.disabled=C.map(function(a){return!!a.disabled}),!B){var N;B={};for(N in A)A[N]instanceof Array?B[N]=A[N].slice(0):B[N]=A[N]}if(!(C&&C.length&&C.filter(function(a){return a.values.length}).length))return a.utils.noData(b,H),b;H.selectAll(".nv-noData").remove(),c=e.xScale(),d=e.yScale();var O=H.selectAll("g.nv-wrap.nv-multiBarWithLegend").data([C]),P=O.enter().append("g").attr("class","nvd3 nv-wrap nv-multiBarWithLegend").append("g"),Q=O.select("g");if(P.append("g").attr("class","nv-x nv-axis"),P.append("g").attr("class","nv-y nv-axis"),P.append("g").attr("class","nv-barsWrap"),P.append("g").attr("class","nv-legendWrap"),P.append("g").attr("class","nv-controlsWrap"),P.append("g").attr("class","nv-interactive"),s?(i.width(L-E()),Q.select(".nv-legendWrap").datum(C).call(i),m||i.height()===l.top||(l.top=i.height(),M=a.utils.availableHeight(o,H,l)),Q.select(".nv-legendWrap").attr("transform","translate("+E()+","+-l.top+")")):Q.select(".nv-legendWrap").selectAll("*").remove(),q){var R=[{key:r.grouped||"Grouped",disabled:e.stacked()},{key:r.stacked||"Stacked",disabled:!e.stacked()}];j.width(E()).color(["#444","#444","#444"]),Q.select(".nv-controlsWrap").datum(R).attr("transform","translate(0,"+-l.top+")").call(j)}else Q.select(".nv-controlsWrap").selectAll("*").remove();O.attr("transform","translate("+l.left+","+l.top+")"),v&&Q.select(".nv-y.nv-axis").attr("transform","translate("+L+",0)"),e.disabled(C.map(function(a){return a.disabled})).width(L).height(M).color(C.map(function(a,b){return a.color||p(a,b)}).filter(function(a,b){return!C[b].disabled}));var 
S=Q.select(".nv-barsWrap").datum(C.filter(function(a){return!a.disabled}));if(S.call(e),t){f.scale(c)._ticks(a.utils.calcTicksX(L/100,C)).tickSize(-M,0),Q.select(".nv-x.nv-axis").attr("transform","translate(0,"+d.range()[0]+")"),Q.select(".nv-x.nv-axis").call(f);var T=Q.select(".nv-x.nv-axis > g").selectAll("g");if(T.selectAll("line, text").style("opacity",1),x){var U=function(a,b){return"translate("+a+","+b+")"},V=5,W=17;T.selectAll("text").attr("transform",function(a,b,c){return U(0,c%2==0?V:W)});var X=d3.selectAll(".nv-x.nv-axis .nv-wrap g g text")[0].length;Q.selectAll(".nv-x.nv-axis .nv-axisMaxMin text").attr("transform",function(a,b){return U(0,0===b||X%2!==0?W:V)})}y&&Q.selectAll(".tick text").call(a.utils.wrapTicks,b.xAxis.rangeBand()),w&&T.filter(function(a,b){return b%Math.ceil(C[0].values.length/(L/100))!==0}).selectAll("text, line").style("opacity",0),z&&T.selectAll(".tick text").attr("transform","rotate("+z+" 0,0)").style("text-anchor",z>0?"start":"end"),Q.select(".nv-x.nv-axis").selectAll("g.nv-axisMaxMin text").style("opacity",1)}u&&(g.scale(d)._ticks(a.utils.calcTicksY(M/36,C)).tickSize(-L,0),Q.select(".nv-y.nv-axis").call(g)),G&&(h.width(L).height(M).margin({left:l.left,top:l.top}).svgContainer(H).xScale(c),O.select(".nv-interactive").call(h)),i.dispatch.on("stateChange",function(a){for(var c in a)A[c]=a[c];D.stateChange(A),b.update()}),j.dispatch.on("legendClick",function(a,c){if(a.disabled){switch(R=R.map(function(a){return a.disabled=!0,a}),a.disabled=!1,a.key){case"Grouped":case r.grouped:e.stacked(!1);break;case"Stacked":case r.stacked:e.stacked(!0)}A.stacked=e.stacked(),D.stateChange(A),b.update()}}),D.on("changeState",function(a){"undefined"!=typeof a.disabled&&(C.forEach(function(b,c){b.disabled=a.disabled[c]}),A.disabled=a.disabled),"undefined"!=typeof a.stacked&&(e.stacked(a.stacked),A.stacked=a.stacked,I=a.stacked),b.update()}),G?(h.dispatch.on("elementMousemove",function(a){if(void 0!=a.pointXValue){var d,e,f,g,i=[];C.filter(function(a,b){return a.seriesIndex=b,!a.disabled}).forEach(function(h,j){e=c.domain().indexOf(a.pointXValue);var k=h.values[e];void 0!==k&&(g=k.x,void 0===d&&(d=k),void 0===f&&(f=a.mouseX),i.push({key:h.key,value:b.y()(k,e),color:p(h,h.seriesIndex),data:h.values[e]}))}),h.tooltip.data({value:g,index:e,series:i})(),h.renderGuideLine(f)}}),h.dispatch.on("elementMouseout",function(a){h.tooltip.hidden(!0)})):(e.dispatch.on("elementMouseover.tooltip",function(a){a.value=b.x()(a.data),a.series={key:a.data.key,value:b.y()(a.data),color:a.color},k.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){k.hidden(!0)}),e.dispatch.on("elementMousemove.tooltip",function(a){k()}))}),H.renderEnd("multibarchart immediate"),b}var c,d,e=a.models.multiBar(),f=a.models.axis(),g=a.models.axis(),h=a.interactiveGuideline(),i=a.models.legend(),j=a.models.legend(),k=a.models.tooltip(),l={top:30,right:20,bottom:50,left:60},m=null,n=null,o=null,p=a.utils.defaultColor(),q=!0,r={},s=!0,t=!0,u=!0,v=!1,w=!0,x=!1,y=!1,z=0,A=a.utils.state(),B=null,C=null,D=d3.dispatch("stateChange","changeState","renderEnd"),E=function(){return q?180:0},F=250,G=!1;A.stacked=!1,e.stacked(!1),f.orient("bottom").tickPadding(7).showMaxMin(!1).tickFormat(function(a){return a}),g.orient(v?"right":"left").tickFormat(d3.format(",.1f")),k.duration(0).valueFormatter(function(a,b){return g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),h.tooltip.valueFormatter(function(a,b){return 
null==a?"N/A":g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),h.tooltip.valueFormatter(function(a,b){return null==a?"N/A":g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),h.tooltip.duration(0).valueFormatter(function(a,b){return g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),j.updateState(!1);var H=a.utils.renderWatch(D),I=!1,J=function(a){return function(){return{active:a.map(function(a){return!a.disabled}),stacked:I}}},K=function(a){return function(b){void 0!==b.stacked&&(I=b.stacked),void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};return b.dispatch=D,b.multibar=e,b.legend=i,b.controls=j,b.xAxis=f,b.yAxis=g,b.state=A,b.tooltip=k,b.interactiveLayer=h,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return n},set:function(a){n=a}},height:{get:function(){return o},set:function(a){o=a}},showLegend:{get:function(){return s},set:function(a){s=a}},showControls:{get:function(){return q},set:function(a){q=a}},controlLabels:{get:function(){return r},set:function(a){r=a}},showXAxis:{get:function(){return t},set:function(a){t=a}},showYAxis:{get:function(){return u},set:function(a){u=a}},defaultState:{get:function(){return B},set:function(a){B=a}},noData:{get:function(){return C},set:function(a){C=a}},reduceXTicks:{get:function(){return w},set:function(a){w=a}},rotateLabels:{get:function(){return z},set:function(a){z=a}},staggerLabels:{get:function(){return x},set:function(a){x=a}},wrapLabels:{get:function(){return y},set:function(a){y=!!a}},margin:{get:function(){return l},set:function(a){void 0!==a.top&&(l.top=a.top,m=a.top),l.right=void 0!==a.right?a.right:l.right,l.bottom=void 0!==a.bottom?a.bottom:l.bottom,l.left=void 0!==a.left?a.left:l.left}},duration:{get:function(){return F},set:function(a){F=a,e.duration(F),f.duration(F),g.duration(F),H.reset(F)}},color:{get:function(){return p},set:function(b){p=a.utils.getColor(b),i.color(p)}},rightAlignYAxis:{get:function(){return v},set:function(a){v=a,g.orient(v?"right":"left")}},useInteractiveGuideline:{get:function(){return G},set:function(a){G=a}},barColor:{get:function(){return e.barColor},set:function(a){e.barColor(a),i.color(function(a,b){return d3.rgb("#ccc").darker(1.5*b).toString()})}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.multiBarHorizontal=function(){"use strict";function b(m){return F.reset(),m.each(function(b){var m=k-j.left-j.right,D=l-j.top-j.bottom;n=d3.select(this),a.utils.initSVG(n),w&&(b=d3.layout.stack().offset("zero").values(function(a){return a.values}).y(r)(b)),b.forEach(function(a,b){a.values.forEach(function(c){c.series=b,c.key=a.key})}),w&&b[0].values.map(function(a,c){var d=0,e=0;b.map(function(a){var b=a.values[c];b.size=Math.abs(b.y),b.y<0?(b.y1=e-b.size,e-=b.size):(b.y1=d,d+=b.size)})});var G=d&&e?[]:b.map(function(a){return a.values.map(function(a,b){return{x:q(a,b),y:r(a,b),y0:a.y0,y1:a.y1}})});o.domain(d||d3.merge(G).map(function(a){return a.x})).rangeBands(f||[0,D],A),p.domain(e||d3.extent(d3.merge(G).map(function(a){return w?a.y>0?a.y1+a.y:a.y1:a.y}).concat(t))),x&&!w?p.range(g||[p.domain()[0]<0?z:0,m-(p.domain()[1]>0?z:0)]):p.range(g||[0,m]),h=h||o,i=i||d3.scale.linear().domain(p.domain()).range([p(0),p(0)]);var H=d3.select(this).selectAll("g.nv-wrap.nv-multibarHorizontal").data([b]),I=H.enter().append("g").attr("class","nvd3 nv-wrap 
nv-multibarHorizontal"),J=(I.append("defs"),I.append("g"));H.select("g");J.append("g").attr("class","nv-groups"),H.attr("transform","translate("+j.left+","+j.top+")");var K=H.select(".nv-groups").selectAll(".nv-group").data(function(a){return a},function(a,b){return b});K.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),K.exit().watchTransition(F,"multibarhorizontal: exit groups").style("stroke-opacity",1e-6).style("fill-opacity",1e-6).remove(),K.attr("class",function(a,b){return"nv-group nv-series-"+b}).classed("hover",function(a){return a.hover}).style("fill",function(a,b){return u(a,b)}).style("stroke",function(a,b){return u(a,b)}),K.watchTransition(F,"multibarhorizontal: groups").style("stroke-opacity",1).style("fill-opacity",B);var L=K.selectAll("g.nv-bar").data(function(a){return a.values});L.exit().remove();var M=L.enter().append("g").attr("transform",function(a,c,d){return"translate("+i(w?a.y0:0)+","+(w?0:d*o.rangeBand()/b.length+o(q(a,c)))+")"});M.append("rect").attr("width",0).attr("height",o.rangeBand()/(w?1:b.length)),L.on("mouseover",function(a,b){d3.select(this).classed("hover",!0),E.elementMouseover({data:a,index:b,color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1),E.elementMouseout({data:a,index:b,color:d3.select(this).style("fill")})}).on("mouseout",function(a,b){E.elementMouseout({data:a,index:b,color:d3.select(this).style("fill")})}).on("mousemove",function(a,b){E.elementMousemove({data:a,index:b,color:d3.select(this).style("fill")})}).on("click",function(a,b){var c=this;E.elementClick({data:a,index:b,color:d3.select(this).style("fill"),event:d3.event,element:c}),d3.event.stopPropagation()}).on("dblclick",function(a,b){E.elementDblClick({data:a,index:b,color:d3.select(this).style("fill")}),d3.event.stopPropagation()}),s(b[0],0)&&(M.append("polyline"),L.select("polyline").attr("fill","none").attr("points",function(a,c){var d=s(a,c),e=.8*o.rangeBand()/(2*(w?1:b.length));d=d.length?d:[-Math.abs(d),Math.abs(d)],d=d.map(function(a){return p(a)-p(0)});var f=[[d[0],-e],[d[0],e],[d[0],0],[d[1],0],[d[1],-e],[d[1],e]];return f.map(function(a){return a.join(",")}).join(" ")}).attr("transform",function(a,c){var d=o.rangeBand()/(2*(w?1:b.length));return"translate("+(r(a,c)<0?0:p(r(a,c))-p(0))+", "+d+")"})),M.append("text"),x&&!w?(L.select("text").attr("text-anchor",function(a,b){return r(a,b)<0?"end":"start"}).attr("y",o.rangeBand()/(2*b.length)).attr("dy",".32em").text(function(a,b){var c=C(r(a,b)),d=s(a,b);return void 0===d?c:d.length?c+"+"+C(Math.abs(d[1]))+"-"+C(Math.abs(d[0])):c+"±"+C(Math.abs(d))}),L.watchTransition(F,"multibarhorizontal: bars").select("text").attr("x",function(a,b){return r(a,b)<0?-4:p(r(a,b))-p(0)+4})):L.selectAll("text").text(""),y&&!w?(M.append("text").classed("nv-bar-label",!0),L.select("text.nv-bar-label").attr("text-anchor",function(a,b){return r(a,b)<0?"start":"end"}).attr("y",o.rangeBand()/(2*b.length)).attr("dy",".32em").text(function(a,b){return q(a,b)}),L.watchTransition(F,"multibarhorizontal: bars").select("text.nv-bar-label").attr("x",function(a,b){return r(a,b)<0?p(0)-p(r(a,b))+4:-4})):L.selectAll("text.nv-bar-label").text(""),L.attr("class",function(a,b){return r(a,b)<0?"nv-bar negative":"nv-bar positive"}),v&&(c||(c=b.map(function(){return!0})),L.style("fill",function(a,b,d){return d3.rgb(v(a,b)).darker(c.map(function(a,b){return b}).filter(function(a,b){return!c[b]})[d]).toString()}).style("stroke",function(a,b,d){return 
d3.rgb(v(a,b)).darker(c.map(function(a,b){return b}).filter(function(a,b){return!c[b]})[d]).toString()})),w?L.watchTransition(F,"multibarhorizontal: bars").attr("transform",function(a,b){return"translate("+p(a.y1)+","+o(q(a,b))+")"}).select("rect").attr("width",function(a,b){return Math.abs(p(r(a,b)+a.y0)-p(a.y0))||0}).attr("height",o.rangeBand()):L.watchTransition(F,"multibarhorizontal: bars").attr("transform",function(a,c){return"translate("+p(r(a,c)<0?r(a,c):0)+","+(a.series*o.rangeBand()/b.length+o(q(a,c)))+")"}).select("rect").attr("height",o.rangeBand()/b.length).attr("width",function(a,b){return Math.max(Math.abs(p(r(a,b))-p(0)),1)||0}),h=o.copy(),i=p.copy()}),F.renderEnd("multibarHorizontal immediate"),b}var c,d,e,f,g,h,i,j={top:0,right:0,bottom:0,left:0},k=960,l=500,m=Math.floor(1e4*Math.random()),n=null,o=d3.scale.ordinal(),p=d3.scale.linear(),q=function(a){return a.x},r=function(a){return a.y},s=function(a){return a.yErr},t=[0],u=a.utils.defaultColor(),v=null,w=!1,x=!1,y=!1,z=60,A=.1,B=.75,C=d3.format(",.2f"),D=250,E=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove","renderEnd"),F=a.utils.renderWatch(E,D);return b.dispatch=E,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return k},set:function(a){k=a}},height:{get:function(){return l},set:function(a){l=a}},x:{get:function(){return q},set:function(a){q=a}},y:{get:function(){return r},set:function(a){r=a}},yErr:{get:function(){return s},set:function(a){s=a}},xScale:{get:function(){return o},set:function(a){o=a}},yScale:{get:function(){return p},set:function(a){p=a}},xDomain:{get:function(){return d},set:function(a){d=a}},yDomain:{get:function(){return e},set:function(a){e=a}},xRange:{get:function(){return f},set:function(a){f=a}},yRange:{get:function(){return g},set:function(a){g=a}},forceY:{get:function(){return t},set:function(a){t=a}},stacked:{get:function(){return w},set:function(a){w=a}},showValues:{get:function(){return x},set:function(a){x=a}},disabled:{get:function(){return c},set:function(a){c=a}},id:{get:function(){return m},set:function(a){m=a}},valueFormat:{get:function(){return C},set:function(a){C=a}},valuePadding:{get:function(){return z},set:function(a){z=a}},groupSpacing:{get:function(){return A},set:function(a){A=a}},fillOpacity:{get:function(){return B},set:function(a){B=a}},margin:{get:function(){return j},set:function(a){j.top=void 0!==a.top?a.top:j.top,j.right=void 0!==a.right?a.right:j.right,j.bottom=void 0!==a.bottom?a.bottom:j.bottom,j.left=void 0!==a.left?a.left:j.left}},duration:{get:function(){return D},set:function(a){D=a,F.reset(D)}},color:{get:function(){return u},set:function(b){u=a.utils.getColor(b)}},barColor:{get:function(){return v},set:function(b){v=b?a.utils.getColor(b):null}}}),a.utils.initOptions(b),b},a.models.multiBarHorizontalChart=function(){"use strict";function b(j){return D.reset(),D.models(e),s&&D.models(f),t&&D.models(g),j.each(function(j){var x=d3.select(this);a.utils.initSVG(x);var D=a.utils.availableWidth(m,x,k),E=a.utils.availableHeight(n,x,k);if(b.update=function(){x.transition().duration(A).call(b)},b.container=this,u=e.stacked(),v.setter(C(j),b.update).getter(B(j)).update(),v.disabled=j.map(function(a){return!!a.disabled}),!w){var F;w={};for(F in v)v[F]instanceof Array?w[F]=v[F].slice(0):w[F]=v[F]}if(!(j&&j.length&&j.filter(function(a){return a.values.length}).length))return a.utils.noData(b,x),b;x.selectAll(".nv-noData").remove(),c=e.xScale(),d=e.yScale().clamp(!0);var 
G=x.selectAll("g.nv-wrap.nv-multiBarHorizontalChart").data([j]),H=G.enter().append("g").attr("class","nvd3 nv-wrap nv-multiBarHorizontalChart").append("g"),I=G.select("g");if(H.append("g").attr("class","nv-x nv-axis"),H.append("g").attr("class","nv-y nv-axis").append("g").attr("class","nv-zeroLine").append("line"),H.append("g").attr("class","nv-barsWrap"),H.append("g").attr("class","nv-legendWrap"),H.append("g").attr("class","nv-controlsWrap"),r?(h.width(D-z()),I.select(".nv-legendWrap").datum(j).call(h),l||h.height()===k.top||(k.top=h.height(),E=a.utils.availableHeight(n,x,k)),I.select(".nv-legendWrap").attr("transform","translate("+z()+","+-k.top+")")):I.select(".nv-legendWrap").selectAll("*").remove(),p){var J=[{key:q.grouped||"Grouped",disabled:e.stacked()},{key:q.stacked||"Stacked",disabled:!e.stacked()}];i.width(z()).color(["#444","#444","#444"]),I.select(".nv-controlsWrap").datum(J).attr("transform","translate(0,"+-k.top+")").call(i)}else I.select(".nv-controlsWrap").selectAll("*").remove();G.attr("transform","translate("+k.left+","+k.top+")"),e.disabled(j.map(function(a){return a.disabled})).width(D).height(E).color(j.map(function(a,b){return a.color||o(a,b)}).filter(function(a,b){return!j[b].disabled}));var K=I.select(".nv-barsWrap").datum(j.filter(function(a){return!a.disabled}));if(K.transition().call(e),s){f.scale(c)._ticks(a.utils.calcTicksY(E/24,j)).tickSize(-D,0),I.select(".nv-x.nv-axis").call(f);var L=I.select(".nv-x.nv-axis").selectAll("g");L.selectAll("line, text")}t&&(g.scale(d)._ticks(a.utils.calcTicksX(D/100,j)).tickSize(-E,0),I.select(".nv-y.nv-axis").attr("transform","translate(0,"+E+")"),I.select(".nv-y.nv-axis").call(g)),I.select(".nv-zeroLine line").attr("x1",d(0)).attr("x2",d(0)).attr("y1",0).attr("y2",-E),h.dispatch.on("stateChange",function(a){for(var c in a)v[c]=a[c];y.stateChange(v),b.update()}),i.dispatch.on("legendClick",function(a,c){if(a.disabled){switch(J=J.map(function(a){return a.disabled=!0,a}),a.disabled=!1,a.key){case"Grouped":case q.grouped:e.stacked(!1);break;case"Stacked":case q.stacked:e.stacked(!0)}v.stacked=e.stacked(),y.stateChange(v),u=e.stacked(),b.update()}}),y.on("changeState",function(a){"undefined"!=typeof a.disabled&&(j.forEach(function(b,c){b.disabled=a.disabled[c]}),v.disabled=a.disabled),"undefined"!=typeof a.stacked&&(e.stacked(a.stacked),v.stacked=a.stacked,u=a.stacked),b.update()})}),D.renderEnd("multibar horizontal chart immediate"),b}var c,d,e=a.models.multiBarHorizontal(),f=a.models.axis(),g=a.models.axis(),h=a.models.legend().height(30),i=a.models.legend().height(30),j=a.models.tooltip(),k={top:30,right:20,bottom:50,left:60},l=null,m=null,n=null,o=a.utils.defaultColor(),p=!0,q={},r=!0,s=!0,t=!0,u=!1,v=a.utils.state(),w=null,x=null,y=d3.dispatch("stateChange","changeState","renderEnd"),z=function(){return p?180:0},A=250;v.stacked=!1,e.stacked(u),f.orient("left").tickPadding(5).showMaxMin(!1).tickFormat(function(a){return a}),g.orient("bottom").tickFormat(d3.format(",.1f")),j.duration(0).valueFormatter(function(a,b){return g.tickFormat()(a,b)}).headerFormatter(function(a,b){return f.tickFormat()(a,b)}),i.updateState(!1);var B=function(a){return function(){return{active:a.map(function(a){return!a.disabled}),stacked:u}}},C=function(a){return function(b){void 0!==b.stacked&&(u=b.stacked),void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}},D=a.utils.renderWatch(y,A);return 
e.dispatch.on("elementMouseover.tooltip",function(a){a.value=b.x()(a.data),a.series={key:a.data.key,value:b.y()(a.data),color:a.color},j.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){j.hidden(!0)}),e.dispatch.on("elementMousemove.tooltip",function(a){j()}),b.dispatch=y,b.multibar=e,b.legend=h,b.controls=i,b.xAxis=f,b.yAxis=g,b.state=v,b.tooltip=j,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return m},set:function(a){m=a}},height:{get:function(){return n},set:function(a){n=a}},showLegend:{get:function(){return r},set:function(a){r=a}},showControls:{get:function(){return p},set:function(a){p=a}},controlLabels:{get:function(){return q},set:function(a){q=a}},showXAxis:{get:function(){return s},set:function(a){s=a}},showYAxis:{get:function(){return t},set:function(a){t=a}},defaultState:{get:function(){return w},set:function(a){w=a}},noData:{get:function(){return x},set:function(a){x=a}},margin:{get:function(){return k},set:function(a){void 0!==a.top&&(k.top=a.top,l=a.top),k.right=void 0!==a.right?a.right:k.right,k.bottom=void 0!==a.bottom?a.bottom:k.bottom,k.left=void 0!==a.left?a.left:k.left}},duration:{get:function(){return A},set:function(a){A=a,D.reset(A),e.duration(A),f.duration(A),g.duration(A)}},color:{get:function(){return o},set:function(b){o=a.utils.getColor(b),h.color(o)}},barColor:{get:function(){return e.barColor},set:function(a){e.barColor(a),h.color(function(a,b){return d3.rgb("#ccc").darker(1.5*b).toString()})}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.multiChart=function(){"use strict";function b(k){return k.each(function(k){function o(a){var b=2===k[a.seriesIndex].yAxis?G:F;a.value=a.point.x,a.series={value:a.point.y,color:a.point.color,key:a.series.key},I.duration(0).headerFormatter(function(a,b){return E.tickFormat()(a,b)}).valueFormatter(function(a,c){return b.tickFormat()(a,c)}).data(a).hidden(!1)}function s(a){var b=2===k[a.seriesIndex].yAxis?G:F;a.value=a.point.x,a.series={value:a.point.y,color:a.point.color,key:a.series.key},I.duration(100).headerFormatter(function(a,b){return E.tickFormat()(a,b)}).valueFormatter(function(a,c){return b.tickFormat()(a,c)}).data(a).hidden(!1)}function J(a){var b=2===k[a.seriesIndex].yAxis?G:F;a.point.x=C.x()(a.point),a.point.y=C.y()(a.point),I.duration(0).headerFormatter(function(a,b){return E.tickFormat()(a,b)}).valueFormatter(function(a,c){return b.tickFormat()(a,c)}).data(a).hidden(!1)}function L(a){var b=2===k[a.data.series].yAxis?G:F;a.value=A.x()(a.data),a.series={value:A.y()(a.data),color:a.color,key:a.data.key},I.duration(0).headerFormatter(function(a,b){return E.tickFormat()(a,b)}).valueFormatter(function(a,c){return b.tickFormat()(a,c)}).data(a).hidden(!1)}function M(){for(var a=0,b=K.length;b>a;a++){var c=K[a];try{c.clearHighlights()}catch(d){}}}function N(a,b,c){for(var d=0,e=K.length;e>d;d++){var f=K[d];try{f.highlightPoint(a,b,c)}catch(g){}}}var O=d3.select(this);a.utils.initSVG(O),b.update=function(){O.transition().call(b)},b.container=this;var 
P=a.utils.availableWidth(h,O,e),Q=a.utils.availableHeight(i,O,e),R=k.filter(function(a){return"line"==a.type&&1==a.yAxis}),S=k.filter(function(a){return"line"==a.type&&2==a.yAxis}),T=k.filter(function(a){return"scatter"==a.type&&1==a.yAxis}),U=k.filter(function(a){return"scatter"==a.type&&2==a.yAxis}),V=k.filter(function(a){return"bar"==a.type&&1==a.yAxis}),W=k.filter(function(a){return"bar"==a.type&&2==a.yAxis}),X=k.filter(function(a){return"area"==a.type&&1==a.yAxis}),Y=k.filter(function(a){return"area"==a.type&&2==a.yAxis});if(!(k&&k.length&&k.filter(function(a){return a.values.length}).length))return a.utils.noData(b,O),b;O.selectAll(".nv-noData").remove();var Z=k.filter(function(a){return!a.disabled&&1==a.yAxis}).map(function(a){return a.values.map(function(a,b){return{x:l(a),y:m(a)}})}),$=k.filter(function(a){return!a.disabled&&2==a.yAxis}).map(function(a){return a.values.map(function(a,b){return{x:l(a),y:m(a)}})});t.domain(d3.extent(d3.merge(Z.concat($)),function(a){return a.x})).range([0,P]);var _=O.selectAll("g.wrap.multiChart").data([k]),aa=_.enter().append("g").attr("class","wrap nvd3 multiChart").append("g");aa.append("g").attr("class","nv-x nv-axis"),aa.append("g").attr("class","nv-y1 nv-axis"),aa.append("g").attr("class","nv-y2 nv-axis"),aa.append("g").attr("class","stack1Wrap"),aa.append("g").attr("class","stack2Wrap"),aa.append("g").attr("class","bars1Wrap"),aa.append("g").attr("class","bars2Wrap"),aa.append("g").attr("class","scatters1Wrap"),aa.append("g").attr("class","scatters2Wrap"),aa.append("g").attr("class","lines1Wrap"),aa.append("g").attr("class","lines2Wrap"),aa.append("g").attr("class","legendWrap"),aa.append("g").attr("class","nv-interactive");var ba=_.select("g"),ca=k.map(function(a,b){return k[b].color||g(a,b)});if(j){var da=H.align()?P/2:P,ea=H.align()?da:0;H.width(da),H.color(ca),ba.select(".legendWrap").datum(k.map(function(a){return a.originalKey=void 0===a.originalKey?a.key:a.originalKey,a.key=a.originalKey+(1==a.yAxis?"":r),a})).call(H),f||H.height()===e.top||(e.top=H.height(),Q=a.utils.availableHeight(i,O,e)),ba.select(".legendWrap").attr("transform","translate("+ea+","+-e.top+")")}else ba.select(".legendWrap").selectAll("*").remove();w.width(P).height(Q).interpolate(n).color(ca.filter(function(a,b){return!k[b].disabled&&1==k[b].yAxis&&"line"==k[b].type})),x.width(P).height(Q).interpolate(n).color(ca.filter(function(a,b){return!k[b].disabled&&2==k[b].yAxis&&"line"==k[b].type})),y.width(P).height(Q).color(ca.filter(function(a,b){return!k[b].disabled&&1==k[b].yAxis&&"scatter"==k[b].type})),z.width(P).height(Q).color(ca.filter(function(a,b){return!k[b].disabled&&2==k[b].yAxis&&"scatter"==k[b].type})),A.width(P).height(Q).color(ca.filter(function(a,b){return!k[b].disabled&&1==k[b].yAxis&&"bar"==k[b].type})),B.width(P).height(Q).color(ca.filter(function(a,b){return!k[b].disabled&&2==k[b].yAxis&&"bar"==k[b].type})),C.width(P).height(Q).interpolate(n).color(ca.filter(function(a,b){return!k[b].disabled&&1==k[b].yAxis&&"area"==k[b].type})),D.width(P).height(Q).interpolate(n).color(ca.filter(function(a,b){return!k[b].disabled&&2==k[b].yAxis&&"area"==k[b].type})),ba.attr("transform","translate("+e.left+","+e.top+")");var 
fa=ba.select(".lines1Wrap").datum(R.filter(function(a){return!a.disabled})),ga=ba.select(".scatters1Wrap").datum(T.filter(function(a){return!a.disabled})),ha=ba.select(".bars1Wrap").datum(V.filter(function(a){return!a.disabled})),ia=ba.select(".stack1Wrap").datum(X.filter(function(a){return!a.disabled})),ja=ba.select(".lines2Wrap").datum(S.filter(function(a){return!a.disabled})),ka=ba.select(".scatters2Wrap").datum(U.filter(function(a){return!a.disabled})),la=ba.select(".bars2Wrap").datum(W.filter(function(a){return!a.disabled})),ma=ba.select(".stack2Wrap").datum(Y.filter(function(a){return!a.disabled})),na=X.length?X.map(function(a){return a.values}).reduce(function(a,b){return a.map(function(a,c){return{x:a.x,y:a.y+b[c].y}})}).concat([{x:0,y:0}]):[],oa=Y.length?Y.map(function(a){return a.values}).reduce(function(a,b){return a.map(function(a,c){return{x:a.x,y:a.y+b[c].y}})}).concat([{x:0,y:0}]):[];u.domain(c||d3.extent(d3.merge(Z).concat(na),function(a){return a.y})).range([0,Q]),v.domain(d||d3.extent(d3.merge($).concat(oa),function(a){return a.y})).range([0,Q]),w.yDomain(u.domain()),y.yDomain(u.domain()),A.yDomain(u.domain()),C.yDomain(u.domain()),x.yDomain(v.domain()),z.yDomain(v.domain()),B.yDomain(v.domain()),D.yDomain(v.domain()),X.length&&d3.transition(ia).call(C),Y.length&&d3.transition(ma).call(D),V.length&&d3.transition(ha).call(A),W.length&&d3.transition(la).call(B),R.length&&d3.transition(fa).call(w),
+S.length&&d3.transition(ja).call(x),T.length&&d3.transition(ga).call(y),U.length&&d3.transition(ka).call(z),E._ticks(a.utils.calcTicksX(P/100,k)).tickSize(-Q,0),ba.select(".nv-x.nv-axis").attr("transform","translate(0,"+Q+")"),d3.transition(ba.select(".nv-x.nv-axis")).call(E),F._ticks(a.utils.calcTicksY(Q/36,k)).tickSize(-P,0),d3.transition(ba.select(".nv-y1.nv-axis")).call(F),G._ticks(a.utils.calcTicksY(Q/36,k)).tickSize(-P,0),d3.transition(ba.select(".nv-y2.nv-axis")).call(G),ba.select(".nv-y1.nv-axis").classed("nv-disabled",Z.length?!1:!0).attr("transform","translate("+t.range()[0]+",0)"),ba.select(".nv-y2.nv-axis").classed("nv-disabled",$.length?!1:!0).attr("transform","translate("+t.range()[1]+",0)"),H.dispatch.on("stateChange",function(a){b.update()}),q&&(p.width(P).height(Q).margin({left:e.left,top:e.top}).svgContainer(O).xScale(t),_.select(".nv-interactive").call(p)),q?(p.dispatch.on("elementMousemove",function(c){M();var d,e,f,h=[];k.filter(function(a,b){return a.seriesIndex=b,!a.disabled}).forEach(function(i,j){var k=t.domain(),l=i.values.filter(function(a,c){return b.x()(a,c)>=k[0]&&b.x()(a,c)<=k[1]});e=a.interactiveBisect(l,c.pointXValue,b.x());var m=l[e],n=b.y()(m,e);null!==n&&N(j,e,!0),void 0!==m&&(void 0===d&&(d=m),void 0===f&&(f=t(b.x()(m,e))),h.push({key:i.key,value:n,color:g(i,i.seriesIndex),data:m,yAxis:2==i.yAxis?G:F}))});var i=function(a,b){var c=h[b].yAxis;return null==a?"N/A":c.tickFormat()(a)};p.tooltip.headerFormatter(function(a,b){return E.tickFormat()(a,b)}).valueFormatter(p.tooltip.valueFormatter()||i).data({value:b.x()(d,e),index:e,series:h})(),p.renderGuideLine(f)}),p.dispatch.on("elementMouseout",function(a){M()})):(w.dispatch.on("elementMouseover.tooltip",o),x.dispatch.on("elementMouseover.tooltip",o),w.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),x.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),y.dispatch.on("elementMouseover.tooltip",s),z.dispatch.on("elementMouseover.tooltip",s),y.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),z.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),C.dispatch.on("elementMouseover.tooltip",J),D.dispatch.on("elementMouseover.tooltip",J),C.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),D.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),A.dispatch.on("elementMouseover.tooltip",L),B.dispatch.on("elementMouseover.tooltip",L),A.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),B.dispatch.on("elementMouseout.tooltip",function(a){I.hidden(!0)}),A.dispatch.on("elementMousemove.tooltip",function(a){I()}),B.dispatch.on("elementMousemove.tooltip",function(a){I()}))}),b}var c,d,e={top:30,right:20,bottom:50,left:60},f=null,g=a.utils.defaultColor(),h=null,i=null,j=!0,k=null,l=function(a){return a.x},m=function(a){return a.y},n="linear",o=!0,p=a.interactiveGuideline(),q=!1,r=" (right 
axis)",s=250,t=d3.scale.linear(),u=d3.scale.linear(),v=d3.scale.linear(),w=a.models.line().yScale(u).duration(s),x=a.models.line().yScale(v).duration(s),y=a.models.scatter().yScale(u).duration(s),z=a.models.scatter().yScale(v).duration(s),A=a.models.multiBar().stacked(!1).yScale(u).duration(s),B=a.models.multiBar().stacked(!1).yScale(v).duration(s),C=a.models.stackedArea().yScale(u).duration(s),D=a.models.stackedArea().yScale(v).duration(s),E=a.models.axis().scale(t).orient("bottom").tickPadding(5).duration(s),F=a.models.axis().scale(u).orient("left").duration(s),G=a.models.axis().scale(v).orient("right").duration(s),H=a.models.legend().height(30),I=a.models.tooltip(),J=d3.dispatch(),K=[w,x,y,z,A,B,C,D];return b.dispatch=J,b.legend=H,b.lines1=w,b.lines2=x,b.scatters1=y,b.scatters2=z,b.bars1=A,b.bars2=B,b.stack1=C,b.stack2=D,b.xAxis=E,b.yAxis1=F,b.yAxis2=G,b.tooltip=I,b.interactiveLayer=p,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},showLegend:{get:function(){return j},set:function(a){j=a}},yDomain1:{get:function(){return c},set:function(a){c=a}},yDomain2:{get:function(){return d},set:function(a){d=a}},noData:{get:function(){return k},set:function(a){k=a}},interpolate:{get:function(){return n},set:function(a){n=a}},legendRightAxisHint:{get:function(){return r},set:function(a){r=a}},margin:{get:function(){return e},set:function(a){void 0!==a.top&&(e.top=a.top,f=a.top),e.right=void 0!==a.right?a.right:e.right,e.bottom=void 0!==a.bottom?a.bottom:e.bottom,e.left=void 0!==a.left?a.left:e.left}},color:{get:function(){return g},set:function(b){g=a.utils.getColor(b)}},x:{get:function(){return l},set:function(a){l=a,w.x(a),x.x(a),y.x(a),z.x(a),A.x(a),B.x(a),C.x(a),D.x(a)}},y:{get:function(){return m},set:function(a){m=a,w.y(a),x.y(a),y.y(a),z.y(a),C.y(a),D.y(a),A.y(a),B.y(a)}},useVoronoi:{get:function(){return o},set:function(a){o=a,w.useVoronoi(a),x.useVoronoi(a),C.useVoronoi(a),D.useVoronoi(a)}},useInteractiveGuideline:{get:function(){return q},set:function(a){q=a,q&&(w.interactive(!1),w.useVoronoi(!1),x.interactive(!1),x.useVoronoi(!1),C.interactive(!1),C.useVoronoi(!1),D.interactive(!1),D.useVoronoi(!1),y.interactive(!1),z.interactive(!1))}},duration:{get:function(){return s},set:function(a){s=a,[w,x,C,D,y,z,E,F,G].forEach(function(a){a.duration(s)})}}}),a.utils.initOptions(b),b},a.models.ohlcBar=function(){"use strict";function b(y){return y.each(function(b){k=d3.select(this);var y=a.utils.availableWidth(h,k,g),A=a.utils.availableHeight(i,k,g);a.utils.initSVG(k);var B=y/b[0].values.length*.9;l.domain(c||d3.extent(b[0].values.map(n).concat(t))),v?l.range(e||[.5*y/b[0].values.length,y*(b[0].values.length-.5)/b[0].values.length]):l.range(e||[5+B/2,y-B/2-5]),m.domain(d||[d3.min(b[0].values.map(s).concat(u)),d3.max(b[0].values.map(r).concat(u))]).range(f||[A,0]),l.domain()[0]===l.domain()[1]&&(l.domain()[0]?l.domain([l.domain()[0]-.01*l.domain()[0],l.domain()[1]+.01*l.domain()[1]]):l.domain([-1,1])),m.domain()[0]===m.domain()[1]&&(m.domain()[0]?m.domain([m.domain()[0]+.01*m.domain()[0],m.domain()[1]-.01*m.domain()[1]]):m.domain([-1,1]));var C=d3.select(this).selectAll("g.nv-wrap.nv-ohlcBar").data([b[0].values]),D=C.enter().append("g").attr("class","nvd3 nv-wrap 
nv-ohlcBar"),E=D.append("defs"),F=D.append("g"),G=C.select("g");F.append("g").attr("class","nv-ticks"),C.attr("transform","translate("+g.left+","+g.top+")"),k.on("click",function(a,b){z.chartClick({data:a,index:b,pos:d3.event,id:j})}),E.append("clipPath").attr("id","nv-chart-clip-path-"+j).append("rect"),C.select("#nv-chart-clip-path-"+j+" rect").attr("width",y).attr("height",A),G.attr("clip-path",w?"url(#nv-chart-clip-path-"+j+")":"");var H=C.select(".nv-ticks").selectAll(".nv-tick").data(function(a){return a});H.exit().remove(),H.enter().append("path").attr("class",function(a,b,c){return(p(a,b)>q(a,b)?"nv-tick negative":"nv-tick positive")+" nv-tick-"+c+"-"+b}).attr("d",function(a,b){return"m0,0l0,"+(m(p(a,b))-m(r(a,b)))+"l"+-B/2+",0l"+B/2+",0l0,"+(m(s(a,b))-m(p(a,b)))+"l0,"+(m(q(a,b))-m(s(a,b)))+"l"+B/2+",0l"+-B/2+",0z"}).attr("transform",function(a,b){return"translate("+l(n(a,b))+","+m(r(a,b))+")"}).attr("fill",function(a,b){return x[0]}).attr("stroke",function(a,b){return x[0]}).attr("x",0).attr("y",function(a,b){return m(Math.max(0,o(a,b)))}).attr("height",function(a,b){return Math.abs(m(o(a,b))-m(0))}),H.attr("class",function(a,b,c){return(p(a,b)>q(a,b)?"nv-tick negative":"nv-tick positive")+" nv-tick-"+c+"-"+b}),d3.transition(H).attr("transform",function(a,b){return"translate("+l(n(a,b))+","+m(r(a,b))+")"}).attr("d",function(a,c){var d=y/b[0].values.length*.9;return"m0,0l0,"+(m(p(a,c))-m(r(a,c)))+"l"+-d/2+",0l"+d/2+",0l0,"+(m(s(a,c))-m(p(a,c)))+"l0,"+(m(q(a,c))-m(s(a,c)))+"l"+d/2+",0l"+-d/2+",0z"})}),b}var c,d,e,f,g={top:0,right:0,bottom:0,left:0},h=null,i=null,j=Math.floor(1e4*Math.random()),k=null,l=d3.scale.linear(),m=d3.scale.linear(),n=function(a){return a.x},o=function(a){return a.y},p=function(a){return a.open},q=function(a){return a.close},r=function(a){return a.high},s=function(a){return a.low},t=[],u=[],v=!1,w=!0,x=a.utils.defaultColor(),y=!1,z=d3.dispatch("stateChange","changeState","renderEnd","chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove");return b.highlightPoint=function(a,c){b.clearHighlights(),k.select(".nv-ohlcBar .nv-tick-0-"+a).classed("hover",c)},b.clearHighlights=function(){k.select(".nv-ohlcBar .nv-tick.hover").classed("hover",!1)},b.dispatch=z,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},xScale:{get:function(){return l},set:function(a){l=a}},yScale:{get:function(){return m},set:function(a){m=a}},xDomain:{get:function(){return c},set:function(a){c=a}},yDomain:{get:function(){return d},set:function(a){d=a}},xRange:{get:function(){return e},set:function(a){e=a}},yRange:{get:function(){return f},set:function(a){f=a}},forceX:{get:function(){return t},set:function(a){t=a}},forceY:{get:function(){return u},set:function(a){u=a}},padData:{get:function(){return v},set:function(a){v=a}},clipEdge:{get:function(){return w},set:function(a){w=a}},id:{get:function(){return j},set:function(a){j=a}},interactive:{get:function(){return y},set:function(a){y=a}},x:{get:function(){return n},set:function(a){n=a}},y:{get:function(){return o},set:function(a){o=a}},open:{get:function(){return p()},set:function(a){p=a}},close:{get:function(){return q()},set:function(a){q=a}},high:{get:function(){return r},set:function(a){r=a}},low:{get:function(){return s},set:function(a){s=a}},margin:{get:function(){return g},set:function(a){g.top=void 0!=a.top?a.top:g.top,g.right=void 
0!=a.right?a.right:g.right,g.bottom=void 0!=a.bottom?a.bottom:g.bottom,g.left=void 0!=a.left?a.left:g.left}},color:{get:function(){return x},set:function(b){x=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.parallelCoordinates=function(){"use strict";function b(B){return A.reset(),B.each(function(b){function A(a){return x(o.map(function(b){if(isNaN(a.values[b.key])||isNaN(parseFloat(a.values[b.key]))||O){var c=l[b.key].domain(),d=l[b.key].range(),e=c[0]-(c[1]-c[0])/9;if(v.indexOf(b.key)<0){var f=d3.scale.linear().domain([e,c[1]]).range([j-12,d[1]]);l[b.key].brush.y(f),v.push(b.key)}if(isNaN(a.values[b.key])||isNaN(parseFloat(a.values[b.key])))return[k(b.key),l[b.key](e)]}return void 0!==U&&(v.length>0||O?(U.style("display","inline"),V.style("display","inline")):(U.style("display","none"),V.style("display","none"))),[k(b.key),l[b.key](a.values[b.key])]}))}function B(a){s.forEach(function(b){var c=l[b.dimension].brush.y().domain();b.hasOnlyNaN&&(b.extent[1]=(l[b.dimension].domain()[1]-c[0])*(b.extent[1]-b.extent[0])/(N[b.dimension]-b.extent[0])+c[0]),b.hasNaN&&(b.extent[0]=c[0]),a&&l[b.dimension].brush.extent(b.extent)}),e.select(".nv-brushBackground").each(function(a){d3.select(this).call(l[a.key].brush)}).selectAll("rect").attr("x",-8).attr("width",16),F()}function C(){q===!1&&(q=!0,B(!0))}function D(){$=p.filter(function(a){return!l[a].brush.empty()}),_=$.map(function(a){return l[a].brush.extent()}),s=[],$.forEach(function(a,b){s[b]={dimension:a,extent:_[b],hasNaN:!1,hasOnlyNaN:!1}}),t=[],c.style("display",function(a){var b=$.every(function(b,c){return(isNaN(a.values[b])||isNaN(parseFloat(a.values[b])))&&_[c][0]==l[b].brush.y().domain()[0]?!0:_[c][0]<=a.values[b]&&a.values[b]<=_[c][1]&&!isNaN(parseFloat(a.values[b]))});return b&&t.push(a),b?null:"none"}),F(),z.brush({filters:s,active:t})}function E(){var a=$.length>0?!0:!1;s.forEach(function(a){a.extent[0]===l[a.dimension].brush.y().domain()[0]&&v.indexOf(a.dimension)>=0&&(a.hasNaN=!0),a.extent[1]<l[a.dimension].domain()[0]&&(a.hasOnlyNaN=!0)}),z.brushEnd(t,a)}function F(){e.select(".nv-axis").each(function(a,b){var c=s.filter(function(b){return b.dimension==a.key});P[a.key]=l[a.key].domain(),0!=c.length&&q&&(P[a.key]=[],c[0].extent[1]>l[a.key].domain()[0]&&(P[a.key]=[c[0].extent[1]]),c[0].extent[0]>=l[a.key].domain()[0]&&P[a.key].push(c[0].extent[0])),d3.select(this).call(y.scale(l[a.key]).tickFormat(a.format).tickValues(P[a.key]))})}function G(a){u[a.key]=this.parentNode.__origin__=k(a.key),d.attr("visibility","hidden")}function H(a){u[a.key]=Math.min(i,Math.max(0,this.parentNode.__origin__+=d3.event.x)),c.attr("d",A),o.sort(function(a,b){return J(a.key)-J(b.key)}),o.forEach(function(a,b){return a.currentPosition=b}),k.domain(o.map(function(a){return a.key})),e.attr("transform",function(a){return"translate("+J(a.key)+")"})}function I(a,b){delete this.parentNode.__origin__,delete u[a.key],d3.select(this.parentNode).attr("transform","translate("+k(a.key)+")"),c.attr("d",A),d.attr("d",A).attr("visibility",null),z.dimensionsOrder(o)}function J(a){var b=u[a];return null==b?k(a):b}var K=d3.select(this);if(i=a.utils.availableWidth(g,K,f),j=a.utils.availableHeight(h,K,f),a.utils.initSVG(K),void 0===b[0].values){var L=[];b.forEach(function(a){var b={},c=Object.keys(a);c.forEach(function(c){"name"!==c&&(b[c]=a[c])}),L.push({key:a.name,values:b})}),b=L}var M=b.map(function(a){return a.values});0===t.length&&(t=b),p=n.sort(function(a,b){return a.currentPosition-b.currentPosition}).map(function(a){return 
a.key}),o=n.filter(function(a){return!a.disabled}),k.rangePoints([0,i],1).domain(o.map(function(a){return a.key}));var N={},O=!1,P=[];p.forEach(function(a){var b=d3.extent(M,function(b){return+b[a]}),c=b[0],d=b[1],e=!1;(isNaN(c)||isNaN(d))&&(e=!0,c=0,d=0),c===d&&(c-=1,d+=1);var f=s.filter(function(b){return b.dimension==a});0!==f.length&&(e?(c=l[a].domain()[0],d=l[a].domain()[1]):!f[0].hasOnlyNaN&&q?(c=c>f[0].extent[0]?f[0].extent[0]:c,d=d<f[0].extent[1]?f[0].extent[1]:d):f[0].hasNaN&&(d=d<f[0].extent[1]?f[0].extent[1]:d,N[a]=l[a].domain()[1],O=!0)),l[a]=d3.scale.linear().domain([c,d]).range([.9*(j-12),0]),v=[],l[a].brush=d3.svg.brush().y(l[a]).on("brushstart",C).on("brush",D).on("brushend",E)});var Q=K.selectAll("g.nv-wrap.nv-parallelCoordinates").data([b]),R=Q.enter().append("g").attr("class","nvd3 nv-wrap nv-parallelCoordinates"),S=R.append("g"),T=Q.select("g");S.append("g").attr("class","nv-parallelCoordinates background"),S.append("g").attr("class","nv-parallelCoordinates foreground"),S.append("g").attr("class","nv-parallelCoordinates missingValuesline"),Q.attr("transform","translate("+f.left+","+f.top+")"),x.interpolate("cardinal").tension(w),y.orient("left");var U,V,W=d3.behavior.drag().on("dragstart",G).on("drag",H).on("dragend",I),X=k.range()[1]-k.range()[0];if(X=isNaN(X)?k.range()[0]:X,!isNaN(X)){var Y=[0+X/2,j-12,i-X/2,j-12];U=Q.select(".missingValuesline").selectAll("line").data([Y]),U.enter().append("line"),U.exit().remove(),U.attr("x1",function(a){return a[0]}).attr("y1",function(a){return a[1]}).attr("x2",function(a){return a[2]}).attr("y2",function(a){return a[3]}),V=Q.select(".missingValuesline").selectAll("text").data([m]),V.append("text").data([m]),V.enter().append("text"),V.exit().remove(),V.attr("y",j).attr("x",i-92-X/2).text(function(a){return a})}d=Q.select(".background").selectAll("path").data(b),d.enter().append("path"),d.exit().remove(),d.attr("d",A),c=Q.select(".foreground").selectAll("path").data(b),c.enter().append("path"),c.exit().remove(),c.attr("d",A).style("stroke-width",function(a,b){return isNaN(a.strokeWidth)&&(a.strokeWidth=1),a.strokeWidth}).attr("stroke",function(a,b){return a.color||r(a,b)}),c.on("mouseover",function(a,b){d3.select(this).classed("hover",!0).style("stroke-width",a.strokeWidth+2+"px").style("stroke-opacity",1),z.elementMouseover({label:a.name,color:a.color||r(a,b),values:a.values,dimensions:o})}),c.on("mouseout",function(a,b){d3.select(this).classed("hover",!1).style("stroke-width",a.strokeWidth+"px").style("stroke-opacity",.7),z.elementMouseout({label:a.name,index:b})}),c.on("mousemove",function(a,b){z.elementMousemove()}),c.on("click",function(a){z.elementClick({id:a.id})}),e=T.selectAll(".dimension").data(o);var Z=e.enter().append("g").attr("class","nv-parallelCoordinates dimension");e.attr("transform",function(a){return"translate("+k(a.key)+",0)"}),Z.append("g").attr("class","nv-axis"),Z.append("text").attr("class","nv-label").style("cursor","move").attr("dy","-1em").attr("text-anchor","middle").on("mouseover",function(a,b){z.elementMouseover({label:a.tooltip||a.key,color:a.color})}).on("mouseout",function(a,b){z.elementMouseout({label:a.tooltip})}).on("mousemove",function(a,b){z.elementMousemove()}).call(W),Z.append("g").attr("class","nv-brushBackground"),e.exit().remove(),e.select(".nv-label").text(function(a){return a.key}),B(q);var $=p.filter(function(a){return!l[a].brush.empty()}),_=$.map(function(a){return l[a].brush.extent()}),aa=t.slice(0);t=[],c.style("display",function(a){var 
b=$.every(function(b,c){return(isNaN(a.values[b])||isNaN(parseFloat(a.values[b])))&&_[c][0]==l[b].brush.y().domain()[0]?!0:_[c][0]<=a.values[b]&&a.values[b]<=_[c][1]&&!isNaN(parseFloat(a.values[b]))});return b&&t.push(a),b?null:"none"}),(s.length>0||!a.utils.arrayEquals(t,aa))&&z.activeChanged(t)}),b}var c,d,e,f={top:30,right:0,bottom:10,left:0},g=null,h=null,i=null,j=null,k=d3.scale.ordinal(),l={},m="undefined values",n=[],o=[],p=[],q=!0,r=a.utils.defaultColor(),s=[],t=[],u=[],v=[],w=1,x=d3.svg.line(),y=d3.svg.axis(),z=d3.dispatch("brushstart","brush","brushEnd","dimensionsOrder","stateChange","elementClick","elementMouseover","elementMouseout","elementMousemove","renderEnd","activeChanged"),A=a.utils.renderWatch(z);return b.dispatch=z,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return g},set:function(a){g=a}},height:{get:function(){return h},set:function(a){h=a}},dimensionData:{get:function(){return n},set:function(a){n=a}},displayBrush:{get:function(){return q},set:function(a){q=a}},filters:{get:function(){return s},set:function(a){s=a}},active:{get:function(){return t},set:function(a){t=a}},lineTension:{get:function(){return w},set:function(a){w=a}},undefinedValuesLabel:{get:function(){return m},set:function(a){m=a}},dimensions:{get:function(){return n.map(function(a){return a.key})},set:function(b){a.deprecated("dimensions","use dimensionData instead"),0===n.length?b.forEach(function(a){n.push({key:a})}):b.forEach(function(a,b){n[b].key=a})}},dimensionNames:{get:function(){return n.map(function(a){return a.key})},set:function(b){a.deprecated("dimensionNames","use dimensionData instead"),p=[],0===n.length?b.forEach(function(a){n.push({key:a})}):b.forEach(function(a,b){n[b].key=a})}},dimensionFormats:{get:function(){return n.map(function(a){return a.format})},set:function(b){a.deprecated("dimensionFormats","use dimensionData instead"),0===n.length?b.forEach(function(a){n.push({format:a})}):b.forEach(function(a,b){n[b].format=a})}},margin:{get:function(){return f},set:function(a){f.top=void 0!==a.top?a.top:f.top,f.right=void 0!==a.right?a.right:f.right,f.bottom=void 0!==a.bottom?a.bottom:f.bottom,f.left=void 0!==a.left?a.left:f.left}},color:{get:function(){return r},set:function(b){r=a.utils.getColor(b)}}}),a.utils.initOptions(b),b},a.models.parallelCoordinatesChart=function(){"use strict";function b(e){return s.reset(),s.models(c),e.each(function(e){var k=d3.select(this);a.utils.initSVG(k);var p=a.utils.availableWidth(h,k,f),q=a.utils.availableHeight(i,k,f);if(b.update=function(){k.call(b)},b.container=this,l.setter(u(m),b.update).getter(t(m)).update(),l.disabled=m.map(function(a){return!!a.disabled}),m=m.map(function(a){return a.disabled=!!a.disabled,a}),m.forEach(function(a,b){a.originalPosition=isNaN(a.originalPosition)?b:a.originalPosition,a.currentPosition=isNaN(a.currentPosition)?b:a.currentPosition}),!o){var s;o={};for(s in l)l[s]instanceof Array?o[s]=l[s].slice(0):o[s]=l[s]}if(!e||!e.length)return a.utils.noData(b,k),b;k.selectAll(".nv-noData").remove();var v=k.selectAll("g.nv-wrap.nv-parallelCoordinatesChart").data([e]),w=v.enter().append("g").attr("class","nvd3 nv-wrap nv-parallelCoordinatesChart").append("g"),x=v.select("g");w.append("g").attr("class","nv-parallelCoordinatesWrap"),w.append("g").attr("class","nv-legendWrap"),x.select("rect").attr("width",p).attr("height",q>0?q:0),j?(d.width(p).color(function(a){return"rgb(188,190,192)"}),x.select(".nv-legendWrap").datum(m.sort(function(a,b){return 
a.originalPosition-b.originalPosition})).call(d),g||d.height()===f.top||(f.top=d.height(),q=a.utils.availableHeight(i,k,f)),v.select(".nv-legendWrap").attr("transform","translate( 0 ,"+-f.top+")")):x.select(".nv-legendWrap").selectAll("*").remove(),v.attr("transform","translate("+f.left+","+f.top+")"),c.width(p).height(q).dimensionData(m).displayBrush(n);var y=x.select(".nv-parallelCoordinatesWrap ").datum(e);y.transition().call(c),c.dispatch.on("brushEnd",function(a,b){b?(n=!0,r.brushEnd(a)):n=!1}),d.dispatch.on("stateChange",function(a){for(var c in a)l[c]=a[c];r.stateChange(l),b.update()}),c.dispatch.on("dimensionsOrder",function(a){m.sort(function(a,b){return a.currentPosition-b.currentPosition});var b=!1;m.forEach(function(a,c){a.currentPosition=c,a.currentPosition!==a.originalPosition&&(b=!0)}),r.dimensionsOrder(m,b)}),r.on("changeState",function(a){"undefined"!=typeof a.disabled&&(m.forEach(function(b,c){b.disabled=a.disabled[c]}),l.disabled=a.disabled),b.update()})}),s.renderEnd("parraleleCoordinateChart immediate"),b}var c=a.models.parallelCoordinates(),d=a.models.legend(),e=a.models.tooltip(),f=(a.models.tooltip(),{top:0,right:0,bottom:0,left:0}),g=null,h=null,i=null,j=!0,k=a.utils.defaultColor(),l=a.utils.state(),m=[],n=!0,o=null,p=null,q="undefined",r=d3.dispatch("dimensionsOrder","brushEnd","stateChange","changeState","renderEnd"),s=a.utils.renderWatch(r),t=function(a){return function(){return{active:a.map(function(a){return!a.disabled})}}},u=function(a){return function(b){void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};return e.contentGenerator(function(a){var b='<table><thead><tr><td class="legend-color-guide"><div style="background-color:'+a.color+'"></div></td><td><strong>'+a.key+"</strong></td></tr></thead>";return 0!==a.series.length&&(b+='<tbody><tr><td height ="10px"></td></tr>',a.series.forEach(function(a){b=b+'<tr><td class="legend-color-guide"><div style="background-color:'+a.color+'"></div></td><td class="key">'+a.key+'</td><td class="value">'+a.value+"</td></tr>"}),b+="</tbody>"),b+="</table>"}),c.dispatch.on("elementMouseover.tooltip",function(a){var b={key:a.label,color:a.color,series:[]};a.values&&(Object.keys(a.values).forEach(function(c){var d=a.dimensions.filter(function(a){return a.key===c})[0];if(d){var e;e=isNaN(a.values[c])||isNaN(parseFloat(a.values[c]))?q:d.format(a.values[c]),b.series.push({idx:d.currentPosition,key:c,value:e,color:d.color})}}),b.series.sort(function(a,b){return a.idx-b.idx})),e.data(b).hidden(!1)}),c.dispatch.on("elementMouseout.tooltip",function(a){e.hidden(!0)}),c.dispatch.on("elementMousemove.tooltip",function(){e()}),b.dispatch=r,b.parallelCoordinates=c,b.legend=d,b.tooltip=e,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},showLegend:{get:function(){return j},set:function(a){j=a}},defaultState:{get:function(){return o},set:function(a){o=a}},dimensionData:{get:function(){return m},set:function(a){m=a}},displayBrush:{get:function(){return n},set:function(a){n=a}},noData:{get:function(){return p},set:function(a){p=a}},nanValue:{get:function(){return q},set:function(a){q=a}},margin:{get:function(){return f},set:function(a){void 0!==a.top&&(f.top=a.top,g=a.top),f.right=void 0!==a.right?a.right:f.right,f.bottom=void 0!==a.bottom?a.bottom:f.bottom,f.left=void 0!==a.left?a.left:f.left}},color:{get:function(){return 
k},set:function(b){k=a.utils.getColor(b),d.color(k),c.color(k)}}}),a.utils.inheritOptions(b,c),a.utils.initOptions(b),b},a.models.pie=function(){"use strict";function b(F){return E.reset(),F.each(function(b){function F(a,b){a.endAngle=isNaN(a.endAngle)?0:a.endAngle,a.startAngle=isNaN(a.startAngle)?0:a.startAngle,p||(a.innerRadius=0);var c=d3.interpolate(this._current,a);return this._current=c(0),function(a){return C[b](c(a))}}var G=d-c.left-c.right,H=e-c.top-c.bottom,I=Math.min(G,H)/2,J=[],K=[];if(i=d3.select(this),0===A.length)for(var L=I-I/5,M=y*I,N=0;N<b[0].length;N++)J.push(L),K.push(M);else r?(J=A.map(function(a){return(a.outer-a.outer/5)*I}),K=A.map(function(a){return(a.inner-a.inner/5)*I}),y=d3.min(A.map(function(a){return a.inner-a.inner/5}))):(J=A.map(function(a){return a.outer*I}),K=A.map(function(a){return a.inner*I}),y=d3.min(A.map(function(a){return a.inner})));a.utils.initSVG(i);var O=i.selectAll(".nv-wrap.nv-pie").data(b),P=O.enter().append("g").attr("class","nvd3 nv-wrap nv-pie nv-chart-"+h),Q=P.append("g"),R=O.select("g"),S=Q.append("g").attr("class","nv-pie");Q.append("g").attr("class","nv-pieLabels"),O.attr("transform","translate("+c.left+","+c.top+")"),R.select(".nv-pie").attr("transform","translate("+G/2+","+H/2+")"),R.select(".nv-pieLabels").attr("transform","translate("+G/2+","+H/2+")"),i.on("click",function(a,b){B.chartClick({data:a,index:b,pos:d3.event,id:h})}),C=[],D=[];for(var N=0;N<b[0].length;N++){var T=d3.svg.arc().outerRadius(J[N]),U=d3.svg.arc().outerRadius(J[N]+5);u!==!1&&(T.startAngle(u),U.startAngle(u)),w!==!1&&(T.endAngle(w),U.endAngle(w)),p&&(T.innerRadius(K[N]),U.innerRadius(K[N])),T.cornerRadius&&x&&(T.cornerRadius(x),U.cornerRadius(x)),C.push(T),D.push(U)}var V=d3.layout.pie().sort(null).value(function(a){return a.disabled?0:g(a)});V.padAngle&&v&&V.padAngle(v),p&&q&&(S.append("text").attr("class","nv-pie-title"),O.select(".nv-pie-title").style("text-anchor","middle").text(function(a){return q}).style("font-size",Math.min(G,H)*y*2/(q.length+2)+"px").attr("dy","0.35em").attr("transform",function(a,b){return"translate(0, "+s+")"}));var W=O.select(".nv-pie").selectAll(".nv-slice").data(V),X=O.select(".nv-pieLabels").selectAll(".nv-label").data(V);W.exit().remove(),X.exit().remove();var Y=W.enter().append("g");Y.attr("class","nv-slice"),Y.on("mouseover",function(a,b){d3.select(this).classed("hover",!0),r&&d3.select(this).select("path").transition().duration(70).attr("d",D[b]),B.elementMouseover({data:a.data,index:b,color:d3.select(this).style("fill"),percent:(a.endAngle-a.startAngle)/(2*Math.PI)})}),Y.on("mouseout",function(a,b){d3.select(this).classed("hover",!1),r&&d3.select(this).select("path").transition().duration(50).attr("d",C[b]),B.elementMouseout({data:a.data,index:b})}),Y.on("mousemove",function(a,b){B.elementMousemove({data:a.data,index:b})}),Y.on("click",function(a,b){var c=this;B.elementClick({data:a.data,index:b,color:d3.select(this).style("fill"),event:d3.event,element:c})}),Y.on("dblclick",function(a,b){B.elementDblClick({data:a.data,index:b,color:d3.select(this).style("fill")})}),W.attr("fill",function(a,b){return j(a.data,b)}),W.attr("stroke",function(a,b){return j(a.data,b)});Y.append("path").each(function(a){this._current=a});if(W.select("path").transition().duration(z).attr("d",function(a,b){return C[b](a)}).attrTween("d",F),l){for(var 
Z=[],N=0;N<b[0].length;N++)Z.push(C[N]),m?p&&(Z[N]=d3.svg.arc().outerRadius(C[N].outerRadius()),u!==!1&&Z[N].startAngle(u),w!==!1&&Z[N].endAngle(w)):p||Z[N].innerRadius(0);X.enter().append("g").classed("nv-label",!0).each(function(a,b){var c=d3.select(this);c.attr("transform",function(a,b){if(t){a.outerRadius=J[b]+10,a.innerRadius=J[b]+15;var c=(a.startAngle+a.endAngle)/2*(180/Math.PI);return(a.startAngle+a.endAngle)/2<Math.PI?c-=90:c+=90,"translate("+Z[b].centroid(a)+") rotate("+c+")"}return a.outerRadius=I+10,a.innerRadius=I+15,"translate("+Z[b].centroid(a)+")"}),c.append("rect").style("stroke","#fff").style("fill","#fff").attr("rx",3).attr("ry",3),c.append("text").style("text-anchor",t?(a.startAngle+a.endAngle)/2<Math.PI?"start":"end":"middle").style("fill","#000")});var $={},_=14,aa=140,ba=function(a){return Math.floor(a[0]/aa)*aa+","+Math.floor(a[1]/_)*_},ca=function(a){return(a.endAngle-a.startAngle)/(2*Math.PI)};X.watchTransition(E,"pie labels").attr("transform",function(a,b){if(t){a.outerRadius=J[b]+10,a.innerRadius=J[b]+15;var c=(a.startAngle+a.endAngle)/2*(180/Math.PI);return(a.startAngle+a.endAngle)/2<Math.PI?c-=90:c+=90,"translate("+Z[b].centroid(a)+") rotate("+c+")"}a.outerRadius=I+10,a.innerRadius=I+15;var d=Z[b].centroid(a),e=ca(a);if(a.value&&e>=o){var f=ba(d);$[f]&&(d[1]-=_),$[ba(d)]=!0}return"translate("+d+")"}),X.select(".nv-label text").style("text-anchor",function(a,b){return t?(a.startAngle+a.endAngle)/2<Math.PI?"start":"end":"middle"}).text(function(a,b){var c=ca(a),d="";if(!a.value||o>c)return"";if("function"==typeof n)d=n(a,b,{key:f(a.data),value:g(a.data),percent:k(c)});else switch(n){case"key":d=f(a.data);break;case"value":d=k(g(a.data));break;case"percent":d=d3.format("%")(c)}return d})}}),E.renderEnd("pie immediate"),b}var c={top:0,right:0,bottom:0,left:0},d=500,e=500,f=function(a){return a.x},g=function(a){return a.y},h=Math.floor(1e4*Math.random()),i=null,j=a.utils.defaultColor(),k=d3.format(",.2f"),l=!0,m=!1,n="key",o=.02,p=!1,q=!1,r=!0,s=0,t=!1,u=!1,v=!1,w=!1,x=0,y=.5,z=250,A=[],B=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout","elementMousemove","renderEnd"),C=[],D=[],E=a.utils.renderWatch(B);return b.dispatch=B,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{arcsRadius:{get:function(){return A},set:function(a){A=a}},width:{get:function(){return d},set:function(a){d=a}},height:{get:function(){return e},set:function(a){e=a}},showLabels:{get:function(){return l},set:function(a){l=a}},title:{get:function(){return q},set:function(a){q=a}},titleOffset:{get:function(){return s},set:function(a){s=a}},labelThreshold:{get:function(){return o},set:function(a){o=a}},valueFormat:{get:function(){return k},set:function(a){k=a}},x:{get:function(){return f},set:function(a){f=a}},id:{get:function(){return h},set:function(a){h=a}},endAngle:{get:function(){return w},set:function(a){w=a}},startAngle:{get:function(){return u},set:function(a){u=a}},padAngle:{get:function(){return v},set:function(a){v=a}},cornerRadius:{get:function(){return x},set:function(a){x=a}},donutRatio:{get:function(){return y},set:function(a){y=a}},labelsOutside:{get:function(){return m},set:function(a){m=a}},labelSunbeamLayout:{get:function(){return t},set:function(a){t=a}},donut:{get:function(){return p},set:function(a){p=a}},growOnHover:{get:function(){return r},set:function(a){r=a}},pieLabelsOutside:{get:function(){return m},set:function(b){m=b,a.deprecated("pieLabelsOutside","use labelsOutside 
instead")}},donutLabelsOutside:{get:function(){return m},set:function(b){m=b,a.deprecated("donutLabelsOutside","use labelsOutside instead")}},labelFormat:{get:function(){return k},set:function(b){k=b,a.deprecated("labelFormat","use valueFormat instead")}},margin:{get:function(){return c},set:function(a){c.top="undefined"!=typeof a.top?a.top:c.top,c.right="undefined"!=typeof a.right?a.right:c.right,c.bottom="undefined"!=typeof a.bottom?a.bottom:c.bottom,c.left="undefined"!=typeof a.left?a.left:c.left}},duration:{get:function(){return z},set:function(a){z=a,E.reset(z)}},y:{get:function(){return g},set:function(a){g=d3.functor(a)}},color:{get:function(){return j},set:function(b){j=a.utils.getColor(b)}},labelType:{get:function(){return n},set:function(a){n=a||"key"}}}),a.utils.initOptions(b),b},a.models.pieChart=function(){"use strict";function b(e){return s.reset(),s.models(c),e.each(function(e){var j=d3.select(this);a.utils.initSVG(j);var m=a.utils.availableWidth(h,j,f),p=a.utils.availableHeight(i,j,f);if(b.update=function(){j.transition().call(b)},b.container=this,n.setter(u(e),b.update).getter(t(e)).update(),n.disabled=e.map(function(a){return!!a.disabled}),!o){var q;o={};for(q in n)n[q]instanceof Array?o[q]=n[q].slice(0):o[q]=n[q]}if(!e||!e.length)return a.utils.noData(b,j),b;j.selectAll(".nv-noData").remove();var s=j.selectAll("g.nv-wrap.nv-pieChart").data([e]),v=s.enter().append("g").attr("class","nvd3 nv-wrap nv-pieChart").append("g"),w=s.select("g");
+if(v.append("g").attr("class","nv-pieWrap"),v.append("g").attr("class","nv-legendWrap"),k){if("top"===l)d.width(m).key(c.x()),s.select(".nv-legendWrap").datum(e).call(d),g||d.height()===f.top||(f.top=d.height(),p=a.utils.availableHeight(i,j,f)),s.select(".nv-legendWrap").attr("transform","translate(0,"+-f.top+")");else if("right"===l){var x=a.models.legend().width();x>m/2&&(x=m/2),d.height(p).key(c.x()),d.width(x),m-=d.width(),s.select(".nv-legendWrap").datum(e).call(d).attr("transform","translate("+m+",0)")}}else w.select(".nv-legendWrap").selectAll("*").remove();s.attr("transform","translate("+f.left+","+f.top+")"),c.width(m).height(p);var y=w.select(".nv-pieWrap").datum([e]);d3.transition(y).call(c),d.dispatch.on("stateChange",function(a){for(var c in a)n[c]=a[c];r.stateChange(n),b.update()}),r.on("changeState",function(a){"undefined"!=typeof a.disabled&&(e.forEach(function(b,c){b.disabled=a.disabled[c]}),n.disabled=a.disabled),b.update()})}),s.renderEnd("pieChart immediate"),b}var c=a.models.pie(),d=a.models.legend(),e=a.models.tooltip(),f={top:30,right:20,bottom:20,left:20},g=null,h=null,i=null,j=!1,k=!0,l="top",m=a.utils.defaultColor(),n=a.utils.state(),o=null,p=null,q=250,r=d3.dispatch("stateChange","changeState","renderEnd");e.duration(0).headerEnabled(!1).valueFormatter(function(a,b){return c.valueFormat()(a,b)});var s=a.utils.renderWatch(r),t=function(a){return function(){return{active:a.map(function(a){return!a.disabled})}}},u=function(a){return function(b){void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};return c.dispatch.on("elementMouseover.tooltip",function(a){a.series={key:b.x()(a.data),value:b.y()(a.data),color:a.color,percent:a.percent},j||(delete a.percent,delete a.series.percent),e.data(a).hidden(!1)}),c.dispatch.on("elementMouseout.tooltip",function(a){e.hidden(!0)}),c.dispatch.on("elementMousemove.tooltip",function(a){e()}),b.legend=d,b.dispatch=r,b.pie=c,b.tooltip=e,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},noData:{get:function(){return p},set:function(a){p=a}},showTooltipPercent:{get:function(){return j},set:function(a){j=a}},showLegend:{get:function(){return k},set:function(a){k=a}},legendPosition:{get:function(){return l},set:function(a){l=a}},defaultState:{get:function(){return o},set:function(a){o=a}},color:{get:function(){return m},set:function(a){m=a,d.color(m),c.color(m)}},duration:{get:function(){return q},set:function(a){q=a,s.reset(q),c.duration(q)}},margin:{get:function(){return f},set:function(a){void 0!==a.top&&(f.top=a.top,g=a.top),f.right=void 0!==a.right?a.right:f.right,f.bottom=void 0!==a.bottom?a.bottom:f.bottom,f.left=void 0!==a.left?a.left:f.left}}}),a.utils.inheritOptions(b,c),a.utils.initOptions(b),b},a.models.sankey=function(){"use strict";function b(){n.forEach(function(a){a.sourceLinks=[],a.targetLinks=[]}),o.forEach(function(a){var b=a.source,c=a.target;"number"==typeof b&&(b=a.source=n[a.source]),"number"==typeof c&&(c=a.target=n[a.target]),b.sourceLinks.push(a),c.targetLinks.push(a)})}function c(){n.forEach(function(a){a.value=Math.max(d3.sum(a.sourceLinks,i),d3.sum(a.targetLinks,i))})}function d(){for(var a,b=n,c=0;b.length&&c<n.length;)a=[],b.forEach(function(b){b.x=c,b.dx=k,b.sourceLinks.forEach(function(b){a.indexOf(b.target)<0&&a.push(b.target)})}),b=a,++c;p&&e(c),f((m[0]-k)/(c-1))}function e(a){n.forEach(function(b){b.sourceLinks.length||(b.x=a-1)})}function 
f(a){n.forEach(function(b){b.x*=a})}function g(a){function b(){var a=d3.min(g,function(a){return(m[1]-(a.length-1)*l)/d3.sum(a,i)});g.forEach(function(b){b.forEach(function(b,c){b.y=c,b.dy=b.value*a})}),o.forEach(function(b){b.dy=b.value*a})}function c(a){function b(a){return(a.source.y+a.sy+a.dy/2)*a.value}g.forEach(function(c,d){c.forEach(function(c){if(c.targetLinks.length){var d=d3.sum(c.targetLinks,b)/d3.sum(c.targetLinks,i);c.y+=(d-t(c))*a}})})}function d(a){function b(a){return(a.target.y+a.ty+a.dy/2)*a.value}g.slice().reverse().forEach(function(c){c.forEach(function(c){if(c.sourceLinks.length){var d=d3.sum(c.sourceLinks,b)/d3.sum(c.sourceLinks,i);c.y+=(d-t(c))*a}})})}function e(){g.forEach(function(a){var b,c,d,e=0,g=a.length;for(a.sort(f),d=0;g>d;++d)b=a[d],c=e-b.y,c>0&&(b.y+=c),e=b.y+b.dy+l;if(c=e-l-m[1],c>0)for(e=b.y-=c,d=g-2;d>=0;--d)b=a[d],c=b.y+b.dy+l-e,c>0&&(b.y-=c),e=b.y})}function f(a,b){return a.y-b.y}var g=d3.nest().key(function(a){return a.x}).sortKeys(d3.ascending).entries(n).map(function(a){return a.values});b(),e(),h();for(var j=1;a>0;--a)d(j*=.99),e(),h(),c(j),e(),h()}function h(){function a(a,b){return a.source.y-b.source.y}function b(a,b){return a.target.y-b.target.y}n.forEach(function(c){c.sourceLinks.sort(b),c.targetLinks.sort(a)}),n.forEach(function(a){var b=0,c=0;a.sourceLinks.forEach(function(a){a.sy=b,b+=a.dy}),a.targetLinks.forEach(function(a){a.ty=c,c+=a.dy})})}function i(a){return a.value}var j={},k=24,l=8,m=[1,1],n=[],o=[],p=!0,q=function(a){b(),c(),d(),g(a)},r=function(){h()},s=function(){function a(a){var c=a.source.x+a.source.dx,d=a.target.x,e=d3.interpolateNumber(c,d),f=e(b),g=e(1-b),h=a.source.y+a.sy+a.dy/2,i=a.target.y+a.ty+a.dy/2,j="M"+c+","+h+"C"+f+","+h+" "+g+","+i+" "+d+","+i;return j}var b=.5;return a.curvature=function(c){return arguments.length?(b=+c,a):b},a},t=function(a){return a.y+a.dy/2};return j.options=a.utils.optionsFunc.bind(j),j._options=Object.create({},{nodeWidth:{get:function(){return k},set:function(a){k=+a}},nodePadding:{get:function(){return l},set:function(a){l=a}},nodes:{get:function(){return n},set:function(a){n=a}},links:{get:function(){return o},set:function(a){o=a}},size:{get:function(){return m},set:function(a){m=a}},sinksRight:{get:function(){return p},set:function(a){p=a}},layout:{get:function(){q(32)},set:function(a){q(a)}},relayout:{get:function(){r()},set:function(a){}},center:{get:function(){return t()},set:function(a){"function"==typeof a&&(t=a)}},link:{get:function(){return s()},set:function(a){return"function"==typeof a&&(s=a),s()}}}),a.utils.initOptions(j),j},a.models.sankeyChart=function(){"use strict";function b(a){return a.each(function(b){function c(a){d3.select(this).attr("transform","translate("+a.x+","+(a.y=Math.max(0,Math.min(f-a.dy,d3.event.y)))+")"),d.relayout(),t.attr("d",s)}var i={nodes:[{node:1,name:"Test 1"},{node:2,name:"Test 2"},{node:3,name:"Test 3"},{node:4,name:"Test 4"},{node:5,name:"Test 5"},{node:6,name:"Test 6"}],links:[{source:0,target:1,value:2295},{source:0,target:5,value:1199},{source:1,target:2,value:1119},{source:1,target:5,value:1176},{source:2,target:3,value:487},{source:2,target:5,value:632},{source:3,target:4,value:301},{source:3,target:5,value:186}]},k=!1,l=!1;if(("object"==typeof b.nodes&&b.nodes.length)>=0&&("object"==typeof b.links&&b.links.length)>=0&&(k=!0),b.nodes&&b.nodes.length>0&&b.links&&b.links.length>0&&(l=!0),!k)return console.error("NVD3 Sankey chart error:","invalid data format for",b),console.info("Valid data format is: ",i,JSON.stringify(i)),r(a,"Error loading 
chart, data is invalid"),!1;if(!l)return r(a,"No data available"),!1;var m=a.append("svg").attr("width",e).attr("height",f).append("g").attr("class","nvd3 nv-wrap nv-sankeyChart");d.nodeWidth(g).nodePadding(h).size([e,f]);var s=d.link();d.nodes(b.nodes).links(b.links).layout(32).center(j);var t=m.append("g").selectAll(".link").data(b.links).enter().append("path").attr("class","link").attr("d",s).style("stroke-width",function(a){return Math.max(1,a.dy)}).sort(function(a,b){return b.dy-a.dy});t.append("title").text(n);var u=m.append("g").selectAll(".node").data(b.nodes).enter().append("g").attr("class","node").attr("transform",function(a){return"translate("+a.x+","+a.y+")"}).call(d3.behavior.drag().origin(function(a){return a}).on("dragstart",function(){this.parentNode.appendChild(this)}).on("drag",c));u.append("rect").attr("height",function(a){return a.dy}).attr("width",d.nodeWidth()).style("fill",o).style("stroke",p).append("title").text(q),u.append("text").attr("x",-6).attr("y",function(a){return a.dy/2}).attr("dy",".35em").attr("text-anchor","end").attr("transform",null).text(function(a){return a.name}).filter(function(a){return a.x<e/2}).attr("x",6+d.nodeWidth()).attr("text-anchor","start")}),b}var c={top:5,right:0,bottom:5,left:0},d=a.models.sankey(),e=600,f=400,g=36,h=40,i="units",j=void 0,k=d3.format(",.0f"),l=function(a){return k(a)+" "+i},m=d3.scale.category20(),n=function(a){return a.source.name+" → "+a.target.name+"\n"+l(a.value)},o=function(a){return a.color=m(a.name.replace(/ .*/,""))},p=function(a){return d3.rgb(a.color).darker(2)},q=function(a){return a.name+"\n"+l(a.value)},r=function(a,b){a.append("text").attr("x",0).attr("y",0).attr("class","nvd3-sankey-chart-error").attr("text-anchor","middle").text(b)};return b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{units:{get:function(){return i},set:function(a){i=a}},width:{get:function(){return e},set:function(a){e=a}},height:{get:function(){return f},set:function(a){f=a}},format:{get:function(){return l},set:function(a){l=a}},linkTitle:{get:function(){return n},set:function(a){n=a}},nodeWidth:{get:function(){return g},set:function(a){g=a}},nodePadding:{get:function(){return h},set:function(a){h=a}},center:{get:function(){return j},set:function(a){j=a}},margin:{get:function(){return c},set:function(a){c.top=void 0!==a.top?a.top:c.top,c.right=void 0!==a.right?a.right:c.right,c.bottom=void 0!==a.bottom?a.bottom:c.bottom,c.left=void 0!==a.left?a.left:c.left}},nodeStyle:{get:function(){return{}},set:function(a){o=void 0!==a.fillColor?a.fillColor:o,p=void 0!==a.strokeColor?a.strokeColor:p,q=void 0!==a.title?a.title:q}}}),a.utils.initOptions(b),b},a.models.scatter=function(){"use strict";function b(a){var b,c;return b=a[0].series+":"+a[1],c=Z[b]=Z[b]||{}}function c(a){var b;b=a[0].series+":"+a[1],delete Z[b]}function d(a){var c,d,e,f=b(a),g=!1;for(c=1;c<arguments.length;c+=2)d=arguments[c],e=arguments[c+1](a[0],a[1]),f[d]===e&&f.hasOwnProperty(d)||(f[d]=e,g=!0);return g}function e(b){return X.reset(),b.each(function(b){function T(){if(W=!1,!C)return!1;if(S===!0){var c=d3.merge(b.map(function(b,c){return b.values.map(function(b,d){var e=v(b,d),f=w(b,d);return[a.utils.NaNtoZero(s(e))+1e-4*Math.random(),a.utils.NaNtoZero(t(f))+1e-4*Math.random(),c,d,b]}).filter(function(a,b){return 
D(a[4],b)})}));if(0==c.length)return!1;c.length<3&&(c.push([s.range()[0]-20,t.range()[0]-20,null,null]),c.push([s.range()[1]+20,t.range()[1]+20,null,null]),c.push([s.range()[0]-20,t.range()[0]+20,null,null]),c.push([s.range()[1]+20,t.range()[1]-20,null,null]));var d=d3.geom.polygon([[-10,-10],[-10,n+10],[m+10,n+10],[m+10,-10]]),e=d3.geom.voronoi(c).map(function(a,b){return{data:d.clip(a),series:c[b][2],point:c[b][3]}});ea.select(".nv-point-paths").selectAll("path").remove();var f=ea.select(".nv-point-paths").selectAll("path").data(e),g=f.enter().append("svg:path").attr("d",function(a){return a&&a.data&&0!==a.data.length?"M"+a.data.join(",")+"Z":"M 0 0"}).attr("id",function(a,b){return"nv-path-"+b}).attr("clip-path",function(a,b){return"url(#nv-clip-"+q+"-"+b+")"});if(I&&g.style("fill",d3.rgb(230,230,230)).style("fill-opacity",.4).style("stroke-opacity",1).style("stroke",d3.rgb(200,200,200)),H){ea.select(".nv-point-clips").selectAll("*").remove();var h=ea.select(".nv-point-clips").selectAll("clipPath").data(c);h.enter().append("svg:clipPath").attr("id",function(a,b){return"nv-clip-"+q+"-"+b}).append("svg:circle").attr("cx",function(a){return a[0]}).attr("cy",function(a){return a[1]}).attr("r",J)}var i=function(a,c,d){if(W)return 0;var e=b[c.series];if(void 0!==e){var f=e.values[c.point];f.color=o(e,c.series),f.x=v(f),f.y=w(f);var g=r.node().getBoundingClientRect(),h=window.pageYOffset||document.documentElement.scrollTop,i=window.pageXOffset||document.documentElement.scrollLeft,j={left:s(v(f,c.point))+g.left+i+l.left+10,top:t(w(f,c.point))+g.top+h+l.top+10};d({point:f,series:e,pos:j,relativePos:[s(v(f,c.point))+l.left,t(w(f,c.point))+l.top],seriesIndex:c.series,pointIndex:c.point,event:d3.event,element:a})}};f.on("click",function(a){i(this,a,R.elementClick)}).on("dblclick",function(a){i(this,a,R.elementDblClick)}).on("mouseover",function(a){i(this,a,R.elementMouseover)}).on("mouseout",function(a,b){i(this,a,R.elementMouseout)})}else ea.select(".nv-groups").selectAll(".nv-group").selectAll(".nv-point").on("click",function(a,c){if(W||!b[a.series])return 0;var d=b[a.series],e=d.values[c],f=this;R.elementClick({point:e,series:d,pos:[s(v(e,c))+l.left,t(w(e,c))+l.top],relativePos:[s(v(e,c))+l.left,t(w(e,c))+l.top],seriesIndex:a.series,pointIndex:c,event:d3.event,element:f})}).on("dblclick",function(a,c){if(W||!b[a.series])return 0;var d=b[a.series],e=d.values[c];R.elementDblClick({point:e,series:d,pos:[s(v(e,c))+l.left,t(w(e,c))+l.top],relativePos:[s(v(e,c))+l.left,t(w(e,c))+l.top],seriesIndex:a.series,pointIndex:c})}).on("mouseover",function(a,c){if(W||!b[a.series])return 0;var d=b[a.series],e=d.values[c];R.elementMouseover({point:e,series:d,pos:[s(v(e,c))+l.left,t(w(e,c))+l.top],relativePos:[s(v(e,c))+l.left,t(w(e,c))+l.top],seriesIndex:a.series,pointIndex:c,color:o(a,c)})}).on("mouseout",function(a,c){if(W||!b[a.series])return 0;var d=b[a.series],e=d.values[c];R.elementMouseout({point:e,series:d,pos:[s(v(e,c))+l.left,t(w(e,c))+l.top],relativePos:[s(v(e,c))+l.left,t(w(e,c))+l.top],seriesIndex:a.series,pointIndex:c,color:o(a,c)})})}r=d3.select(this);var Z=a.utils.availableWidth(m,r,l),$=a.utils.availableHeight(n,r,l);a.utils.initSVG(r),b.forEach(function(a,b){a.values.forEach(function(a){a.series=b})});var _=e.yScale().name===d3.scale.log().name?!0:!1,aa=K&&L&&O?[]:d3.merge(b.map(function(a){return a.values.map(function(a,b){return{x:v(a,b),y:w(a,b),size:x(a,b)}})}));if(s.domain(K||d3.extent(aa.map(function(a){return 
a.x}).concat(z))),E&&b[0]?s.range(M||[(Z*F+Z)/(2*b[0].values.length),Z-Z*(1+F)/(2*b[0].values.length)]):s.range(M||[0,Z]),_){var ba=d3.min(aa.map(function(a){return 0!==a.y?a.y:void 0}));t.clamp(!0).domain(L||d3.extent(aa.map(function(a){return 0!==a.y?a.y:.1*ba}).concat(A))).range(N||[$,0])}else t.domain(L||d3.extent(aa.map(function(a){return a.y}).concat(A))).range(N||[$,0]);u.domain(O||d3.extent(aa.map(function(a){return a.size}).concat(B))).range(P||Y),Q=s.domain()[0]===s.domain()[1]||t.domain()[0]===t.domain()[1],s.domain()[0]===s.domain()[1]&&(s.domain()[0]?s.domain([s.domain()[0]-.01*s.domain()[0],s.domain()[1]+.01*s.domain()[1]]):s.domain([-1,1])),t.domain()[0]===t.domain()[1]&&(t.domain()[0]?t.domain([t.domain()[0]-.01*t.domain()[0],t.domain()[1]+.01*t.domain()[1]]):t.domain([-1,1])),isNaN(s.domain()[0])&&s.domain([-1,1]),isNaN(t.domain()[0])&&t.domain([-1,1]),f=f||s,g=g||t,h=h||u;var ca=s(1)!==f(1)||t(1)!==g(1)||u(1)!==h(1);i=i||m,j=j||n;var da=i!==m||j!==n,ea=r.selectAll("g.nv-wrap.nv-scatter").data([b]),fa=ea.enter().append("g").attr("class","nvd3 nv-wrap nv-scatter nv-chart-"+q),ga=fa.append("defs"),ha=fa.append("g"),ia=ea.select("g");ea.classed("nv-single-point",Q),ha.append("g").attr("class","nv-groups"),ha.append("g").attr("class","nv-point-paths"),fa.append("g").attr("class","nv-point-clips"),ea.attr("transform","translate("+l.left+","+l.top+")"),ga.append("clipPath").attr("id","nv-edge-clip-"+q).append("rect").attr("transform","translate( -10, -10)"),ea.select("#nv-edge-clip-"+q+" rect").attr("width",Z+20).attr("height",$>0?$+20:0),ia.attr("clip-path",G?"url(#nv-edge-clip-"+q+")":""),W=!0;var ja=ea.select(".nv-groups").selectAll(".nv-group").data(function(a){return a},function(a){return a.key});ja.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),ja.exit().remove(),ja.attr("class",function(a,b){return(a.classed||"")+" nv-group nv-series-"+b}).classed("nv-noninteractive",!C).classed("hover",function(a){return a.hover}),ja.watchTransition(X,"scatter: groups").style("fill",function(a,b){return o(a,b)}).style("stroke",function(a,b){return a.pointBorderColor||p||o(a,b)}).style("stroke-opacity",1).style("fill-opacity",.5);var ka=ja.selectAll("path.nv-point").data(function(a){return a.values.map(function(a,b){return[a,b]}).filter(function(a,b){return D(a[0],b)})});if(ka.enter().append("path").attr("class",function(a){return"nv-point nv-point-"+a[1]}).style("fill",function(a){return a.color}).style("stroke",function(a){return a.color}).attr("transform",function(b){return"translate("+a.utils.NaNtoZero(f(v(b[0],b[1])))+","+a.utils.NaNtoZero(g(w(b[0],b[1])))+")"}).attr("d",a.utils.symbol().type(function(a){return y(a[0])}).size(function(a){return u(x(a[0],a[1]))})),ka.exit().each(c).remove(),ja.exit().selectAll("path.nv-point").watchTransition(X,"scatter exit").attr("transform",function(b){return"translate("+a.utils.NaNtoZero(s(v(b[0],b[1])))+","+a.utils.NaNtoZero(t(w(b[0],b[1])))+")"}).remove(),ka.filter(function(a){return ca||da||d(a,"x",v,"y",w)}).watchTransition(X,"scatter points").attr("transform",function(b){return"translate("+a.utils.NaNtoZero(s(v(b[0],b[1])))+","+a.utils.NaNtoZero(t(w(b[0],b[1])))+")"}),ka.filter(function(a){return ca||da||d(a,"shape",y,"size",x)}).watchTransition(X,"scatter points").attr("d",a.utils.symbol().type(function(a){return y(a[0])}).size(function(a){return u(x(a[0],a[1]))})),V){var la=ja.selectAll(".nv-label").data(function(a){return a.values.map(function(a,b){return[a,b]}).filter(function(a,b){return 
D(a[0],b)})});la.enter().append("text").style("fill",function(a,b){return a.color}).style("stroke-opacity",0).style("fill-opacity",1).attr("transform",function(b){var c=a.utils.NaNtoZero(f(v(b[0],b[1])))+Math.sqrt(u(x(b[0],b[1]))/Math.PI)+2;return"translate("+c+","+a.utils.NaNtoZero(g(w(b[0],b[1])))+")"}).text(function(a,b){return a[0].label}),la.exit().remove(),ja.exit().selectAll("path.nv-label").watchTransition(X,"scatter exit").attr("transform",function(b){var c=a.utils.NaNtoZero(s(v(b[0],b[1])))+Math.sqrt(u(x(b[0],b[1]))/Math.PI)+2;return"translate("+c+","+a.utils.NaNtoZero(t(w(b[0],b[1])))+")"}).remove(),la.each(function(a){d3.select(this).classed("nv-label",!0).classed("nv-label-"+a[1],!1).classed("hover",!1)}),la.watchTransition(X,"scatter labels").attr("transform",function(b){var c=a.utils.NaNtoZero(s(v(b[0],b[1])))+Math.sqrt(u(x(b[0],b[1]))/Math.PI)+2;return"translate("+c+","+a.utils.NaNtoZero(t(w(b[0],b[1])))+")"})}U?(clearTimeout(k),k=setTimeout(T,U)):T(),f=s.copy(),g=t.copy(),h=u.copy(),i=m,j=n}),X.renderEnd("scatter immediate"),e}var f,g,h,i,j,k,l={top:0,right:0,bottom:0,left:0},m=null,n=null,o=a.utils.defaultColor(),p=null,q=Math.floor(1e5*Math.random()),r=null,s=d3.scale.linear(),t=d3.scale.linear(),u=d3.scale.linear(),v=function(a){return a.x},w=function(a){return a.y},x=function(a){return a.size||1},y=function(a){return a.shape||"circle"},z=[],A=[],B=[],C=!0,D=function(a){return!a.notActive},E=!1,F=.1,G=!1,H=!0,I=!1,J=function(){return 25},K=null,L=null,M=null,N=null,O=null,P=null,Q=!1,R=d3.dispatch("elementClick","elementDblClick","elementMouseover","elementMouseout","renderEnd"),S=!0,T=250,U=300,V=!1,W=!1,X=a.utils.renderWatch(R,T),Y=[16,256],Z={};return e.dispatch=R,e.options=a.utils.optionsFunc.bind(e),e._calls=new function(){this.clearHighlights=function(){return a.dom.write(function(){r.selectAll(".nv-point.hover").classed("hover",!1)}),null},this.highlightPoint=function(b,c,d){a.dom.write(function(){r.select(".nv-groups").selectAll(".nv-series-"+b).selectAll(".nv-point-"+c).classed("hover",d)})}},R.on("elementMouseover.point",function(a){C&&e._calls.highlightPoint(a.seriesIndex,a.pointIndex,!0)}),R.on("elementMouseout.point",function(a){C&&e._calls.highlightPoint(a.seriesIndex,a.pointIndex,!1)}),e._options=Object.create({},{width:{get:function(){return m},set:function(a){m=a}},height:{get:function(){return n},set:function(a){n=a}},xScale:{get:function(){return s},set:function(a){s=a}},yScale:{get:function(){return t},set:function(a){t=a}},pointScale:{get:function(){return u},set:function(a){u=a}},xDomain:{get:function(){return K},set:function(a){K=a}},yDomain:{get:function(){return L},set:function(a){L=a}},pointDomain:{get:function(){return O},set:function(a){O=a}},xRange:{get:function(){return M},set:function(a){M=a}},yRange:{get:function(){return N},set:function(a){N=a}},pointRange:{get:function(){return P},set:function(a){P=a}},forceX:{get:function(){return z},set:function(a){z=a}},forceY:{get:function(){return A},set:function(a){A=a}},forcePoint:{get:function(){return B},set:function(a){B=a}},interactive:{get:function(){return C},set:function(a){C=a}},pointActive:{get:function(){return D},set:function(a){D=a}},padDataOuter:{get:function(){return F},set:function(a){F=a}},padData:{get:function(){return E},set:function(a){E=a}},clipEdge:{get:function(){return G},set:function(a){G=a}},clipVoronoi:{get:function(){return H},set:function(a){H=a}},clipRadius:{get:function(){return J},set:function(a){J=a}},showVoronoi:{get:function(){return 
I},set:function(a){I=a}},id:{get:function(){return q},set:function(a){q=a}},interactiveUpdateDelay:{get:function(){return U},set:function(a){U=a}},showLabels:{get:function(){return V},set:function(a){V=a}},pointBorderColor:{get:function(){return p},set:function(a){p=a}},x:{get:function(){return v},set:function(a){v=d3.functor(a)}},y:{get:function(){return w},set:function(a){w=d3.functor(a)}},pointSize:{get:function(){return x},set:function(a){x=d3.functor(a)}},pointShape:{get:function(){return y},set:function(a){y=d3.functor(a)}},margin:{get:function(){return l},set:function(a){l.top=void 0!==a.top?a.top:l.top,l.right=void 0!==a.right?a.right:l.right,l.bottom=void 0!==a.bottom?a.bottom:l.bottom,l.left=void 0!==a.left?a.left:l.left}},duration:{get:function(){return T},set:function(a){T=a,X.reset(T)}},color:{get:function(){return o},set:function(b){o=a.utils.getColor(b)}},useVoronoi:{get:function(){return S},set:function(a){S=a,S===!1&&(H=!1)}}}),a.utils.initOptions(e),e},a.models.scatterChart=function(){"use strict";function b(A){return F.reset(),F.models(c),u&&F.models(d),v&&F.models(e),r&&F.models(g),s&&F.models(h),A.each(function(A){n=d3.select(this),a.utils.initSVG(n);var I=a.utils.availableWidth(l,n,j),J=a.utils.availableHeight(m,n,j);if(b.update=function(){0===B?n.call(b):n.transition().duration(B).call(b)},b.container=this,x.setter(H(A),b.update).getter(G(A)).update(),x.disabled=A.map(function(a){return!!a.disabled}),!y){var K;y={};for(K in x)x[K]instanceof Array?y[K]=x[K].slice(0):y[K]=x[K]}if(!(A&&A.length&&A.filter(function(a){return a.values.length}).length))return a.utils.noData(b,n),F.renderEnd("scatter immediate"),b;n.selectAll(".nv-noData").remove(),p=c.xScale(),q=c.yScale();var L=n.selectAll("g.nv-wrap.nv-scatterChart").data([A]),M=L.enter().append("g").attr("class","nvd3 nv-wrap nv-scatterChart nv-chart-"+c.id()),N=M.append("g"),O=L.select("g");if(N.append("rect").attr("class","nvd3 nv-background").style("pointer-events","none"),N.append("g").attr("class","nv-x nv-axis"),N.append("g").attr("class","nv-y nv-axis"),N.append("g").attr("class","nv-scatterWrap"),N.append("g").attr("class","nv-regressionLinesWrap"),N.append("g").attr("class","nv-distWrap"),N.append("g").attr("class","nv-legendWrap"),w&&O.select(".nv-y.nv-axis").attr("transform","translate("+I+",0)"),t){var P=I;f.width(P),L.select(".nv-legendWrap").datum(A).call(f),k||f.height()===j.top||(j.top=f.height(),J=a.utils.availableHeight(m,n,j)),L.select(".nv-legendWrap").attr("transform","translate(0,"+-j.top+")")}else O.select(".nv-legendWrap").selectAll("*").remove();L.attr("transform","translate("+j.left+","+j.top+")"),c.width(I).height(J).color(A.map(function(a,b){return a.color=a.color||o(a,b),a.color}).filter(function(a,b){return!A[b].disabled})).showLabels(C),L.select(".nv-scatterWrap").datum(A.filter(function(a){return!a.disabled})).call(c),L.select(".nv-regressionLinesWrap").attr("clip-path","url(#nv-edge-clip-"+c.id()+")");var Q=L.select(".nv-regressionLinesWrap").selectAll(".nv-regLines").data(function(a){return a});Q.enter().append("g").attr("class","nv-regLines");var R=Q.selectAll(".nv-regLine").data(function(a){return[a]});R.enter().append("line").attr("class","nv-regLine").style("stroke-opacity",0),R.filter(function(a){return a.intercept&&a.slope}).watchTransition(F,"scatterPlusLineChart: regline").attr("x1",p.range()[0]).attr("x2",p.range()[1]).attr("y1",function(a,b){return q(p.domain()[0]*a.slope+a.intercept)}).attr("y2",function(a,b){return 
q(p.domain()[1]*a.slope+a.intercept)}).style("stroke",function(a,b,c){return o(a,c)}).style("stroke-opacity",function(a,b){return a.disabled||"undefined"==typeof a.slope||"undefined"==typeof a.intercept?0:1}),u&&(d.scale(p)._ticks(a.utils.calcTicksX(I/100,A)).tickSize(-J,0),O.select(".nv-x.nv-axis").attr("transform","translate(0,"+q.range()[0]+")").call(d)),v&&(e.scale(q)._ticks(a.utils.calcTicksY(J/36,A)).tickSize(-I,0),O.select(".nv-y.nv-axis").call(e)),r&&(g.getData(c.x()).scale(p).width(I).color(A.map(function(a,b){return a.color||o(a,b)}).filter(function(a,b){return!A[b].disabled})),N.select(".nv-distWrap").append("g").attr("class","nv-distributionX"),O.select(".nv-distributionX").attr("transform","translate(0,"+q.range()[0]+")").datum(A.filter(function(a){return!a.disabled})).call(g)),s&&(h.getData(c.y()).scale(q).width(J).color(A.map(function(a,b){return a.color||o(a,b)}).filter(function(a,b){return!A[b].disabled})),N.select(".nv-distWrap").append("g").attr("class","nv-distributionY"),O.select(".nv-distributionY").attr("transform","translate("+(w?I:-h.size())+",0)").datum(A.filter(function(a){return!a.disabled})).call(h)),f.dispatch.on("stateChange",function(a){for(var c in a)x[c]=a[c];z.stateChange(x),b.update()}),z.on("changeState",function(a){"undefined"!=typeof a.disabled&&(A.forEach(function(b,c){b.disabled=a.disabled[c]}),x.disabled=a.disabled),b.update()}),c.dispatch.on("elementMouseout.tooltip",function(a){i.hidden(!0),n.select(".nv-chart-"+c.id()+" .nv-series-"+a.seriesIndex+" .nv-distx-"+a.pointIndex).attr("y1",0),n.select(".nv-chart-"+c.id()+" .nv-series-"+a.seriesIndex+" .nv-disty-"+a.pointIndex).attr("x2",h.size())}),c.dispatch.on("elementMouseover.tooltip",function(a){n.select(".nv-series-"+a.seriesIndex+" .nv-distx-"+a.pointIndex).attr("y1",a.relativePos[1]-J),n.select(".nv-series-"+a.seriesIndex+" .nv-disty-"+a.pointIndex).attr("x2",a.relativePos[0]+g.size()),i.data(a).hidden(!1)}),D=p.copy(),E=q.copy()}),F.renderEnd("scatter with line immediate"),b}var c=a.models.scatter(),d=a.models.axis(),e=a.models.axis(),f=a.models.legend(),g=a.models.distribution(),h=a.models.distribution(),i=a.models.tooltip(),j={top:30,right:20,bottom:50,left:75},k=null,l=null,m=null,n=null,o=a.utils.defaultColor(),p=c.xScale(),q=c.yScale(),r=!1,s=!1,t=!0,u=!0,v=!0,w=!1,x=a.utils.state(),y=null,z=d3.dispatch("stateChange","changeState","renderEnd"),A=null,B=250,C=!1;c.xScale(p).yScale(q),d.orient("bottom").tickPadding(10),e.orient(w?"right":"left").tickPadding(10),g.axis("x"),h.axis("y"),i.headerFormatter(function(a,b){return d.tickFormat()(a,b)}).valueFormatter(function(a,b){return e.tickFormat()(a,b)});var D,E,F=a.utils.renderWatch(z,B),G=function(a){return function(){return{active:a.map(function(a){return!a.disabled})}}},H=function(a){return function(b){void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}};return b.dispatch=z,b.scatter=c,b.legend=f,b.xAxis=d,b.yAxis=e,b.distX=g,b.distY=h,b.tooltip=i,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return l},set:function(a){l=a}},height:{get:function(){return m},set:function(a){m=a}},container:{get:function(){return n},set:function(a){n=a}},showDistX:{get:function(){return r},set:function(a){r=a}},showDistY:{get:function(){return s},set:function(a){s=a}},showLegend:{get:function(){return t},set:function(a){t=a}},showXAxis:{get:function(){return u},set:function(a){u=a}},showYAxis:{get:function(){return v},set:function(a){v=a}},defaultState:{get:function(){return 
y},set:function(a){y=a}},noData:{get:function(){return A},set:function(a){A=a}},duration:{get:function(){return B},set:function(a){B=a}},showLabels:{get:function(){return C},set:function(a){C=a}},margin:{get:function(){return j},set:function(a){void 0!==a.top&&(j.top=a.top,k=a.top),j.right=void 0!==a.right?a.right:j.right,j.bottom=void 0!==a.bottom?a.bottom:j.bottom,j.left=void 0!==a.left?a.left:j.left}},rightAlignYAxis:{get:function(){return w},set:function(a){w=a,e.orient(a?"right":"left")}},color:{get:function(){return o},set:function(b){o=a.utils.getColor(b),f.color(o),g.color(o),h.color(o)}}}),a.utils.inheritOptions(b,c),a.utils.initOptions(b),b},a.models.sparkline=function(){"use strict";function b(k){return t.reset(),k.each(function(b){var k=h-g.left-g.right,s=i-g.top-g.bottom;j=d3.select(this),a.utils.initSVG(j),l.domain(c||d3.extent(b,n)).range(e||[0,k]),m.domain(d||d3.extent(b,o)).range(f||[s,0]);var t=j.selectAll("g.nv-wrap.nv-sparkline").data([b]),u=t.enter().append("g").attr("class","nvd3 nv-wrap nv-sparkline");u.append("g"),t.select("g");t.attr("transform","translate("+g.left+","+g.top+")");var v=t.selectAll("path").data(function(a){return[a]});v.enter().append("path"),v.exit().remove(),v.style("stroke",function(a,b){return a.color||p(a,b)}).attr("d",d3.svg.line().x(function(a,b){return l(n(a,b))}).y(function(a,b){return m(o(a,b))}));var w=t.selectAll("circle.nv-point").data(function(a){function b(b){if(-1!=b){var c=a[b];return c.pointIndex=b,c}return null}var c=a.map(function(a,b){return o(a,b)}),d=b(c.lastIndexOf(m.domain()[1])),e=b(c.indexOf(m.domain()[0])),f=b(c.length-1);return[q?e:null,q?d:null,r?f:null].filter(function(a){return null!=a})});w.enter().append("circle"),w.exit().remove(),w.attr("cx",function(a,b){return l(n(a,a.pointIndex))}).attr("cy",function(a,b){return m(o(a,a.pointIndex))}).attr("r",2).attr("class",function(a,b){return n(a,a.pointIndex)==l.domain()[1]?"nv-point nv-currentValue":o(a,a.pointIndex)==m.domain()[0]?"nv-point nv-minValue":"nv-point nv-maxValue"})}),t.renderEnd("sparkline immediate"),b}var c,d,e,f,g={top:2,right:0,bottom:2,left:0},h=400,i=32,j=null,k=!0,l=d3.scale.linear(),m=d3.scale.linear(),n=function(a){return a.x},o=function(a){return a.y},p=a.utils.getColor(["#000"]),q=!0,r=!0,s=d3.dispatch("renderEnd"),t=a.utils.renderWatch(s);return b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return h},set:function(a){h=a}},height:{get:function(){return i},set:function(a){i=a}},xDomain:{get:function(){return c},set:function(a){c=a}},yDomain:{get:function(){return d},set:function(a){d=a}},xRange:{get:function(){return e},set:function(a){e=a}},yRange:{get:function(){return f},set:function(a){f=a}},xScale:{get:function(){return l},set:function(a){l=a}},yScale:{get:function(){return m},set:function(a){m=a}},animate:{get:function(){return k},set:function(a){k=a}},showMinMaxPoints:{get:function(){return q},set:function(a){q=a}},showCurrentPoint:{get:function(){return r},set:function(a){r=a}},x:{get:function(){return n},set:function(a){n=d3.functor(a)}},y:{get:function(){return o},set:function(a){o=d3.functor(a)}},margin:{get:function(){return g},set:function(a){g.top=void 0!==a.top?a.top:g.top,g.right=void 0!==a.right?a.right:g.right,g.bottom=void 0!==a.bottom?a.bottom:g.bottom,g.left=void 0!==a.left?a.left:g.left}},color:{get:function(){return p},set:function(b){p=a.utils.getColor(b)}}}),b.dispatch=s,a.utils.initOptions(b),b},a.models.sparklinePlus=function(){"use strict";function b(p){return 
r.reset(),r.models(e),p.each(function(p){function q(){if(!j){var a=z.selectAll(".nv-hoverValue").data(i),b=a.enter().append("g").attr("class","nv-hoverValue").style("stroke-opacity",0).style("fill-opacity",0);a.exit().transition().duration(250).style("stroke-opacity",0).style("fill-opacity",0).remove(),a.attr("transform",function(a){return"translate("+c(e.x()(p[a],a))+",0)"}).transition().duration(250).style("stroke-opacity",1).style("fill-opacity",1),i.length&&(b.append("line").attr("x1",0).attr("y1",-f.top).attr("x2",0).attr("y2",u),
+b.append("text").attr("class","nv-xValue").attr("x",-6).attr("y",-f.top).attr("text-anchor","end").attr("dy",".9em"),z.select(".nv-hoverValue .nv-xValue").text(k(e.x()(p[i[0]],i[0]))),b.append("text").attr("class","nv-yValue").attr("x",6).attr("y",-f.top).attr("text-anchor","start").attr("dy",".9em"),z.select(".nv-hoverValue .nv-yValue").text(l(e.y()(p[i[0]],i[0]))))}}function r(){function a(a,b){for(var c=Math.abs(e.x()(a[0],0)-b),d=0,f=0;f<a.length;f++)Math.abs(e.x()(a[f],f)-b)<c&&(c=Math.abs(e.x()(a[f],f)-b),d=f);return d}if(!j){var b=d3.mouse(this)[0]-f.left;i=[a(p,Math.round(c.invert(b)))],q()}}var s=d3.select(this);a.utils.initSVG(s);var t=a.utils.availableWidth(g,s,f),u=a.utils.availableHeight(h,s,f);if(b.update=function(){s.call(b)},b.container=this,!p||!p.length)return a.utils.noData(b,s),b;s.selectAll(".nv-noData").remove();var v=e.y()(p[p.length-1],p.length-1);c=e.xScale(),d=e.yScale();var w=s.selectAll("g.nv-wrap.nv-sparklineplus").data([p]),x=w.enter().append("g").attr("class","nvd3 nv-wrap nv-sparklineplus"),y=x.append("g"),z=w.select("g");y.append("g").attr("class","nv-sparklineWrap"),y.append("g").attr("class","nv-valueWrap"),y.append("g").attr("class","nv-hoverArea"),w.attr("transform","translate("+f.left+","+f.top+")");var A=z.select(".nv-sparklineWrap");if(e.width(t).height(u),A.call(e),m){var B=z.select(".nv-valueWrap"),C=B.selectAll(".nv-currentValue").data([v]);C.enter().append("text").attr("class","nv-currentValue").attr("dx",o?-8:8).attr("dy",".9em").style("text-anchor",o?"end":"start"),C.attr("x",t+(o?f.right:0)).attr("y",n?function(a){return d(a)}:0).style("fill",e.color()(p[p.length-1],p.length-1)).text(l(v))}y.select(".nv-hoverArea").append("rect").on("mousemove",r).on("click",function(){j=!j}).on("mouseout",function(){i=[],q()}),z.select(".nv-hoverArea rect").attr("transform",function(a){return"translate("+-f.left+","+-f.top+")"}).attr("width",t+f.left+f.right).attr("height",u+f.top)}),r.renderEnd("sparklinePlus immediate"),b}var c,d,e=a.models.sparkline(),f={top:15,right:100,bottom:10,left:50},g=null,h=null,i=[],j=!1,k=d3.format(",r"),l=d3.format(",.2f"),m=!0,n=!0,o=!1,p=null,q=d3.dispatch("renderEnd"),r=a.utils.renderWatch(q);return b.dispatch=q,b.sparkline=e,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return g},set:function(a){g=a}},height:{get:function(){return h},set:function(a){h=a}},xTickFormat:{get:function(){return k},set:function(a){k=a}},yTickFormat:{get:function(){return l},set:function(a){l=a}},showLastValue:{get:function(){return m},set:function(a){m=a}},alignValue:{get:function(){return n},set:function(a){n=a}},rightAlignValue:{get:function(){return o},set:function(a){o=a}},noData:{get:function(){return p},set:function(a){p=a}},margin:{get:function(){return f},set:function(a){f.top=void 0!==a.top?a.top:f.top,f.right=void 0!==a.right?a.right:f.right,f.bottom=void 0!==a.bottom?a.bottom:f.bottom,f.left=void 0!==a.left?a.left:f.left}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.stackedArea=function(){"use strict";function b(n){return v.reset(),v.models(s),n.each(function(n){var t=f-e.left-e.right,w=g-e.top-e.bottom;j=d3.select(this),a.utils.initSVG(j),c=s.xScale(),d=s.yScale();var x=n;n.forEach(function(a,b){a.seriesIndex=b,a.values=a.values.map(function(a,c){return a.index=c,a.seriesIndex=b,a})});var y=n.filter(function(a){return!a.disabled});n=d3.layout.stack().order(p).offset(o).values(function(a){return a.values}).x(k).y(l).out(function(a,b,c){a.display={y:c,y0:b}})(y);var 
z=j.selectAll("g.nv-wrap.nv-stackedarea").data([n]),A=z.enter().append("g").attr("class","nvd3 nv-wrap nv-stackedarea"),B=A.append("defs"),C=A.append("g"),D=z.select("g");C.append("g").attr("class","nv-areaWrap"),C.append("g").attr("class","nv-scatterWrap"),z.attr("transform","translate("+e.left+","+e.top+")"),0==s.forceY().length&&s.forceY().push(0),s.width(t).height(w).x(k).y(function(a){return void 0!==a.display?a.display.y+a.display.y0:void 0}).color(n.map(function(a,b){return a.color=a.color||h(a,a.seriesIndex),a.color}));var E=D.select(".nv-scatterWrap").datum(n);E.call(s),B.append("clipPath").attr("id","nv-edge-clip-"+i).append("rect"),z.select("#nv-edge-clip-"+i+" rect").attr("width",t).attr("height",w),D.attr("clip-path",r?"url(#nv-edge-clip-"+i+")":"");var F=d3.svg.area().defined(m).x(function(a,b){return c(k(a,b))}).y0(function(a){return d(a.display.y0)}).y1(function(a){return d(a.display.y+a.display.y0)}).interpolate(q),G=d3.svg.area().defined(m).x(function(a,b){return c(k(a,b))}).y0(function(a){return d(a.display.y0)}).y1(function(a){return d(a.display.y0)}),H=D.select(".nv-areaWrap").selectAll("path.nv-area").data(function(a){return a});H.enter().append("path").attr("class",function(a,b){return"nv-area nv-area-"+b}).attr("d",function(a,b){return G(a.values,a.seriesIndex)}).on("mouseover",function(a,b){d3.select(this).classed("hover",!0),u.areaMouseover({point:a,series:a.key,pos:[d3.event.pageX,d3.event.pageY],seriesIndex:a.seriesIndex})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1),u.areaMouseout({point:a,series:a.key,pos:[d3.event.pageX,d3.event.pageY],seriesIndex:a.seriesIndex})}).on("click",function(a,b){d3.select(this).classed("hover",!1),u.areaClick({point:a,series:a.key,pos:[d3.event.pageX,d3.event.pageY],seriesIndex:a.seriesIndex})}),H.exit().remove(),H.style("fill",function(a,b){return a.color||h(a,a.seriesIndex)}).style("stroke",function(a,b){return a.color||h(a,a.seriesIndex)}),H.watchTransition(v,"stackedArea path").attr("d",function(a,b){return F(a.values,b)}),s.dispatch.on("elementMouseover.area",function(a){D.select(".nv-chart-"+i+" .nv-area-"+a.seriesIndex).classed("hover",!0)}),s.dispatch.on("elementMouseout.area",function(a){D.select(".nv-chart-"+i+" .nv-area-"+a.seriesIndex).classed("hover",!1)}),b.d3_stackedOffset_stackPercent=function(a){var b,c,d,e=a.length,f=a[0].length,g=[];for(c=0;f>c;++c){for(b=0,d=0;b<x.length;b++)d+=l(x[b].values[c]);if(d)for(b=0;e>b;b++)a[b][c][1]/=d;else for(b=0;e>b;b++)a[b][c][1]=0}for(c=0;f>c;++c)g[c]=0;return g}}),v.renderEnd("stackedArea immediate"),b}var c,d,e={top:0,right:0,bottom:0,left:0},f=960,g=500,h=a.utils.defaultColor(),i=Math.floor(1e5*Math.random()),j=null,k=function(a){return a.x},l=function(a){return a.y},m=function(a,b){return!isNaN(l(a,b))&&null!==l(a,b)},n="stack",o="zero",p="default",q="linear",r=!1,s=a.models.scatter(),t=250,u=d3.dispatch("areaClick","areaMouseover","areaMouseout","renderEnd","elementClick","elementMouseover","elementMouseout");s.pointSize(2.2).pointDomain([2.2,2.2]);var v=a.utils.renderWatch(u,t);return b.dispatch=u,b.scatter=s,s.dispatch.on("elementClick",function(){u.elementClick.apply(this,arguments)}),s.dispatch.on("elementMouseover",function(){u.elementMouseover.apply(this,arguments)}),s.dispatch.on("elementMouseout",function(){u.elementMouseout.apply(this,arguments)}),b.interpolate=function(a){return arguments.length?(q=a,b):q},b.duration=function(a){return 
arguments.length?(t=a,v.reset(t),s.duration(t),b):t},b.dispatch=u,b.scatter=s,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return f},set:function(a){f=a}},height:{get:function(){return g},set:function(a){g=a}},defined:{get:function(){return m},set:function(a){m=a}},clipEdge:{get:function(){return r},set:function(a){r=a}},offset:{get:function(){return o},set:function(a){o=a}},order:{get:function(){return p},set:function(a){p=a}},interpolate:{get:function(){return q},set:function(a){q=a}},x:{get:function(){return k},set:function(a){k=d3.functor(a)}},y:{get:function(){return l},set:function(a){l=d3.functor(a)}},margin:{get:function(){return e},set:function(a){e.top=void 0!==a.top?a.top:e.top,e.right=void 0!==a.right?a.right:e.right,e.bottom=void 0!==a.bottom?a.bottom:e.bottom,e.left=void 0!==a.left?a.left:e.left}},color:{get:function(){return h},set:function(b){h=a.utils.getColor(b)}},style:{get:function(){return n},set:function(a){switch(n=a){case"stack":b.offset("zero"),b.order("default");break;case"stream":b.offset("wiggle"),b.order("inside-out");break;case"stream-center":b.offset("silhouette"),b.order("inside-out");break;case"expand":b.offset("expand"),b.order("default");break;case"stack_percent":b.offset(b.d3_stackedOffset_stackPercent),b.order("default")}}},duration:{get:function(){return t},set:function(a){t=a,v.reset(t),s.duration(t)}}}),a.utils.inheritOptions(b,s),a.utils.initOptions(b),b},a.models.stackedAreaChart=function(){"use strict";function b(k){return L.reset(),L.models(e),u&&L.models(f),v&&L.models(g),k.each(function(k){function D(){u&&X.select(".nv-focus .nv-x.nv-axis").attr("transform","translate(0,"+T+")").transition().duration(I).call(f)}function L(){if(v){if("expand"===e.style()||"stack_percent"===e.style()){var a=g.tickFormat();J&&a===P||(J=a),g.tickFormat(P)}else J&&(g.tickFormat(J),J=null);X.select(".nv-focus .nv-y.nv-axis").transition().duration(0).call(g)}}function Q(a){var b=X.select(".nv-focus .nv-stackedWrap").datum(k.filter(function(a){return!a.disabled}).map(function(b,c){return{key:b.key,area:b.area,classed:b.classed,values:b.values.filter(function(b,c){return e.x()(b,c)>=a[0]&&e.x()(b,c)<=a[1]}),disableTooltip:b.disableTooltip}}));b.transition().duration(I).call(e),D(),L()}var R=d3.select(this);a.utils.initSVG(R);var S=a.utils.availableWidth(o,R,m),T=a.utils.availableHeight(p,R,m)-(x?l.height():0);if(b.update=function(){R.transition().duration(I).call(b)},b.container=this,B.setter(O(k),b.update).getter(N(k)).update(),B.disabled=k.map(function(a){return!!a.disabled}),!C){var U;C={};for(U in B)B[U]instanceof Array?C[U]=B[U].slice(0):C[U]=B[U]}if(!(k&&k.length&&k.filter(function(a){return a.values.length}).length))return a.utils.noData(b,R),b;R.selectAll(".nv-noData").remove(),c=e.xScale(),d=e.yScale();var V=R.selectAll("g.nv-wrap.nv-stackedAreaChart").data([k]),W=V.enter().append("g").attr("class","nvd3 nv-wrap nv-stackedAreaChart").append("g"),X=V.select("g");W.append("g").attr("class","nv-legendWrap"),W.append("g").attr("class","nv-controlsWrap");var Y=W.append("g").attr("class","nv-focus");Y.append("g").attr("class","nv-background").append("rect"),Y.append("g").attr("class","nv-x nv-axis"),Y.append("g").attr("class","nv-y nv-axis"),Y.append("g").attr("class","nv-stackedWrap"),Y.append("g").attr("class","nv-interactive");W.append("g").attr("class","nv-focusWrap");if(s){var Z=r&&"top"===t?S-F:S;if(h.width(Z),X.select(".nv-legendWrap").datum(k).call(h),"bottom"===t){var 
$=(u?12:0)+10;m.bottom=Math.max(h.height()+$,m.bottom),T=a.utils.availableHeight(p,R,m)-(x?l.height():0);var _=T+$;X.select(".nv-legendWrap").attr("transform","translate(0,"+_+")")}else"top"===t&&(n||m.top==h.height()||(m.top=h.height(),T=a.utils.availableHeight(p,R,m)-(x?l.height():0)),X.select(".nv-legendWrap").attr("transform","translate("+(S-Z)+","+-m.top+")"))}else X.select(".nv-legendWrap").selectAll("*").remove();if(r){var aa=[{key:H.stacked||"Stacked",metaKey:"Stacked",disabled:"stack"!=e.style(),style:"stack"},{key:H.stream||"Stream",metaKey:"Stream",disabled:"stream"!=e.style(),style:"stream"},{key:H.expanded||"Expanded",metaKey:"Expanded",disabled:"expand"!=e.style(),style:"expand"},{key:H.stack_percent||"Stack %",metaKey:"Stack_Percent",disabled:"stack_percent"!=e.style(),style:"stack_percent"}];F=G.length/3*260,aa=aa.filter(function(a){return-1!==G.indexOf(a.metaKey)}),i.width(F).color(["#444","#444","#444"]),X.select(".nv-controlsWrap").datum(aa).call(i);var ba=Math.max(i.height(),s&&"top"===t?h.height():0);m.top!=ba&&(m.top=ba,T=a.utils.availableHeight(p,R,m)-(x?l.height():0)),X.select(".nv-controlsWrap").attr("transform","translate(0,"+-m.top+")")}else X.select(".nv-controlsWrap").selectAll("*").remove();V.attr("transform","translate("+m.left+","+m.top+")"),w&&X.select(".nv-y.nv-axis").attr("transform","translate("+S+",0)"),y&&(j.width(S).height(T).margin({left:m.left,top:m.top}).svgContainer(R).xScale(c),V.select(".nv-interactive").call(j)),X.select(".nv-focus .nv-background rect").attr("width",S).attr("height",T),e.width(S).height(T).color(k.map(function(a,b){return a.color||q(a,b)}).filter(function(a,b){return!k[b].disabled}));var ca=X.select(".nv-focus .nv-stackedWrap").datum(k.filter(function(a){return!a.disabled}));if(u&&f.scale(c)._ticks(a.utils.calcTicksX(S/100,k)).tickSize(-T,0),v){var da;da="wiggle"===e.offset()?0:a.utils.calcTicksY(T/36,k),g.scale(d)._ticks(da).tickSize(-S,0)}if(x){l.width(S),X.select(".nv-focusWrap").attr("transform","translate(0,"+(T+m.bottom+l.margin().top)+")").datum(k.filter(function(a){return!a.disabled})).call(l);var ea=l.brush.empty()?l.xDomain():l.brush.extent();null!==ea&&Q(ea)}else ca.transition().call(e),D(),L();e.dispatch.on("areaClick.toggle",function(a){1===k.filter(function(a){return!a.disabled}).length?k.forEach(function(a){a.disabled=!1}):k.forEach(function(b,c){b.disabled=c!=a.seriesIndex}),B.disabled=k.map(function(a){return!!a.disabled}),E.stateChange(B),b.update()}),h.dispatch.on("stateChange",function(a){for(var c in a)B[c]=a[c];E.stateChange(B),b.update()}),i.dispatch.on("legendClick",function(a,c){a.disabled&&(aa=aa.map(function(a){return a.disabled=!0,a}),a.disabled=!1,e.style(a.style),B.style=e.style(),E.stateChange(B),b.update())}),j.dispatch.on("elementMousemove",function(c){e.clearHighlights();var d,f,g,h=[],i=0,l=!0;if(k.filter(function(a,b){return a.seriesIndex=b,!a.disabled}).forEach(function(j,k){f=a.interactiveBisect(j.values,c.pointXValue,b.x());var m=j.values[f],n=b.y()(m,f);if(null!=n&&e.highlightPoint(k,f,!0),"undefined"!=typeof m){"undefined"==typeof d&&(d=m),"undefined"==typeof g&&(g=b.xScale()(b.x()(m,f)));var o="expand"==e.style()?m.display.y:b.y()(m,f);h.push({key:j.key,value:o,color:q(j,j.seriesIndex),point:m}),z&&"expand"!=e.style()&&null!=o&&(i+=o,l=!1)}}),h.reverse(),h.length>2){var m=b.yScale().invert(c.mouseY),n=null;h.forEach(function(a,b){m=Math.abs(m);var c=Math.abs(a.point.display.y0),d=Math.abs(a.point.display.y);return m>=c&&d+c>=m?void(n=b):void 
0}),null!=n&&(h[n].highlight=!0)}z&&"expand"!=e.style()&&h.length>=2&&!l&&h.push({key:A,value:i,total:!0});var o=b.x()(d,f),p=j.tooltip.valueFormatter();"expand"===e.style()||"stack_percent"===e.style()?(K||(K=p),p=d3.format(".1%")):K&&(p=K,K=null),j.tooltip.valueFormatter(p).data({value:o,series:h})(),j.renderGuideLine(g)}),j.dispatch.on("elementMouseout",function(a){e.clearHighlights()}),l.dispatch.on("onBrush",function(a){Q(a)}),E.on("changeState",function(a){"undefined"!=typeof a.disabled&&k.length===a.disabled.length&&(k.forEach(function(b,c){b.disabled=a.disabled[c]}),B.disabled=a.disabled),"undefined"!=typeof a.style&&(e.style(a.style),M=a.style),b.update()})}),L.renderEnd("stacked Area chart immediate"),b}var c,d,e=a.models.stackedArea(),f=a.models.axis(),g=a.models.axis(),h=a.models.legend(),i=a.models.legend(),j=a.interactiveGuideline(),k=a.models.tooltip(),l=a.models.focus(a.models.stackedArea()),m={top:10,right:25,bottom:50,left:60},n=null,o=null,p=null,q=a.utils.defaultColor(),r=!0,s=!0,t="top",u=!0,v=!0,w=!1,x=!1,y=!1,z=!0,A="TOTAL",B=a.utils.state(),C=null,D=null,E=d3.dispatch("stateChange","changeState","renderEnd"),F=250,G=["Stacked","Stream","Expanded"],H={},I=250;B.style=e.style(),f.orient("bottom").tickPadding(7),g.orient(w?"right":"left"),k.headerFormatter(function(a,b){return f.tickFormat()(a,b)}).valueFormatter(function(a,b){return g.tickFormat()(a,b)}),j.tooltip.headerFormatter(function(a,b){return f.tickFormat()(a,b)}).valueFormatter(function(a,b){return null==a?"N/A":g.tickFormat()(a,b)});var J=null,K=null;i.updateState(!1);var L=a.utils.renderWatch(E),M=e.style(),N=function(a){return function(){return{active:a.map(function(a){return!a.disabled}),style:e.style()}}},O=function(a){return function(b){void 0!==b.style&&(M=b.style),void 0!==b.active&&a.forEach(function(a,c){a.disabled=!b.active[c]})}},P=d3.format("%");return e.dispatch.on("elementMouseover.tooltip",function(a){a.point.x=e.x()(a.point),a.point.y=e.y()(a.point),k.data(a).hidden(!1)}),e.dispatch.on("elementMouseout.tooltip",function(a){k.hidden(!0)}),b.dispatch=E,b.stacked=e,b.legend=h,b.controls=i,b.xAxis=f,b.x2Axis=l.xAxis,b.yAxis=g,b.y2Axis=l.yAxis,b.interactiveLayer=j,b.tooltip=k,b.focus=l,b.dispatch=E,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{width:{get:function(){return o},set:function(a){o=a}},height:{get:function(){return p},set:function(a){p=a}},showLegend:{get:function(){return s},set:function(a){s=a}},legendPosition:{get:function(){return t},set:function(a){t=a}},showXAxis:{get:function(){return u},set:function(a){u=a}},showYAxis:{get:function(){return v},set:function(a){v=a}},defaultState:{get:function(){return C},set:function(a){C=a}},noData:{get:function(){return D},set:function(a){D=a}},showControls:{get:function(){return r},set:function(a){r=a}},controlLabels:{get:function(){return H},set:function(a){H=a}},controlOptions:{get:function(){return G},set:function(a){G=a}},showTotalInTooltip:{get:function(){return z},set:function(a){z=a}},totalLabel:{get:function(){return A},set:function(a){A=a}},focusEnable:{get:function(){return x},set:function(a){x=a}},focusHeight:{get:function(){return l.height()},set:function(a){l.height(a)}},brushExtent:{get:function(){return l.brushExtent()},set:function(a){l.brushExtent(a)}},margin:{get:function(){return m},set:function(a){void 0!==a.top&&(m.top=a.top,n=a.top),m.right=void 0!==a.right?a.right:m.right,m.bottom=void 0!==a.bottom?a.bottom:m.bottom,m.left=void 0!==a.left?a.left:m.left}},focusMargin:{get:function(){return 
l.margin},set:function(a){l.margin.top=void 0!==a.top?a.top:l.margin.top,l.margin.right=void 0!==a.right?a.right:l.margin.right,l.margin.bottom=void 0!==a.bottom?a.bottom:l.margin.bottom,l.margin.left=void 0!==a.left?a.left:l.margin.left}},duration:{get:function(){return I},set:function(a){I=a,L.reset(I),e.duration(I),f.duration(I),g.duration(I)}},color:{get:function(){return q},set:function(b){q=a.utils.getColor(b),h.color(q),e.color(q),l.color(q)}},x:{get:function(){return e.x()},set:function(a){e.x(a),l.x(a)}},y:{get:function(){return e.y()},set:function(a){e.y(a),l.y(a)}},rightAlignYAxis:{get:function(){return w},set:function(a){w=a,g.orient(w?"right":"left")}},useInteractiveGuideline:{get:function(){return y},set:function(a){y=!!a,b.interactive(!a),b.useVoronoi(!a),e.scatter.interactive(!a)}}}),a.utils.inheritOptions(b,e),a.utils.initOptions(b),b},a.models.stackedAreaWithFocusChart=function(){return a.models.stackedAreaChart().margin({bottom:30}).focusEnable(!0)},a.models.sunburst=function(){"use strict";function b(a){var b=c(a);return b>90?180:0}function c(a){var b=Math.max(0,Math.min(2*Math.PI,F(a.x))),c=Math.max(0,Math.min(2*Math.PI,F(a.x+a.dx))),d=(b+c)/2*(180/Math.PI)-90;return d}function d(a){var b=Math.max(0,Math.min(2*Math.PI,F(a.x))),c=Math.max(0,Math.min(2*Math.PI,F(a.x+a.dx)));return(c-b)/(2*Math.PI)}function e(a){var b=Math.max(0,Math.min(2*Math.PI,F(a.x))),c=Math.max(0,Math.min(2*Math.PI,F(a.x+a.dx))),d=c-b;return d>z}function f(a,b){var c=d3.interpolate(F.domain(),[l.x,l.x+l.dx]),d=d3.interpolate(G.domain(),[l.y,1]),e=d3.interpolate(G.range(),[l.y?20:0,o]);return 0===b?function(){return J(a)}:function(b){return F.domain(c(b)),G.domain(d(b)).range(e(b)),J(a)}}function g(a){var b=d3.interpolate({x:a.x0,dx:a.dx0,y:a.y0,dy:a.dy0},a);return function(c){var d=b(c);return a.x0=d.x,a.dx0=d.dx,a.y0=d.y,a.dy0=d.dy,J(d)}}function h(a){var b=B(a);I[b]||(I[b]={});var c=I[b];c.dx=a.dx,c.x=a.x,c.dy=a.dy,c.y=a.y}function i(a){a.forEach(function(a){var b=B(a),c=I[b];c?(a.dx0=c.dx,a.x0=c.x,a.dy0=c.dy,a.y0=c.y):(a.dx0=a.dx,a.x0=a.x,a.dy0=a.dy,a.y0=a.y),h(a)})}function j(a){var d=v.selectAll("text"),g=v.selectAll("path");d.transition().attr("opacity",0),l=a,g.transition().duration(D).attrTween("d",f).each("end",function(d){if(d.x>=a.x&&d.x<a.x+a.dx&&d.depth>=a.depth){var f=d3.select(this.parentNode),g=f.select("text");g.transition().duration(D).text(function(a){return y(a)}).attr("opacity",function(a){return e(a)?1:0}).attr("transform",function(){var e=this.getBBox().width;if(0===d.depth)return"translate("+e/2*-1+",0)";if(d.depth===a.depth)return"translate("+(G(d.y)+5)+",0)";var f=c(d),g=b(d);return 0===g?"rotate("+f+")translate("+(G(d.y)+5)+",0)":"rotate("+f+")translate("+(G(d.y)+e+5)+",0)rotate("+g+")"})}})}function k(f){return K.reset(),f.each(function(f){v=d3.select(this),m=a.utils.availableWidth(q,v,p),n=a.utils.availableHeight(r,v,p),o=Math.min(m,n)/2,G.range([0,o]);var h=v.select("g.nvd3.nv-wrap.nv-sunburst");h[0][0]?h.attr("transform","translate("+(m/2+p.left+p.right)+","+(n/2+p.top+p.bottom)+")"):h=v.append("g").attr("class","nvd3 nv-wrap nv-sunburst nv-chart-"+u).attr("transform","translate("+(m/2+p.left+p.right)+","+(n/2+p.top+p.bottom)+")"),v.on("click",function(a,b){E.chartClick({data:a,index:b,pos:d3.event,id:u})}),H.value(t[s]||t.count);var k=H.nodes(f[0]).reverse();i(k);var l=h.selectAll(".arc-container").data(k,B),z=l.enter().append("g").attr("class","arc-container");z.append("path").attr("d",J).style("fill",function(a){return 
a.color?a.color:w(C?(a.children?a:a.parent).name:a.name)}).style("stroke","#FFF").on("click",function(a,b){j(a),E.elementClick({data:a,index:b})}).on("mouseover",function(a,b){d3.select(this).classed("hover",!0).style("opacity",.8),E.elementMouseover({data:a,color:d3.select(this).style("fill"),percent:d(a)})}).on("mouseout",function(a,b){d3.select(this).classed("hover",!1).style("opacity",1),E.elementMouseout({data:a})}).on("mousemove",function(a,b){E.elementMousemove({data:a})}),l.each(function(a){d3.select(this).select("path").transition().duration(D).attrTween("d",g)}),x&&(l.selectAll("text").remove(),l.append("text").text(function(a){return y(a)}).transition().duration(D).attr("opacity",function(a){return e(a)?1:0}).attr("transform",function(a){var d=this.getBBox().width;if(0===a.depth)return"rotate(0)translate("+d/2*-1+",0)";var e=c(a),f=b(a);return 0===f?"rotate("+e+")translate("+(G(a.y)+5)+",0)":"rotate("+e+")translate("+(G(a.y)+d+5)+",0)rotate("+f+")"})),j(k[k.length-1]),l.exit().transition().duration(D).attr("opacity",0).each("end",function(a){var b=B(a);I[b]=void 0}).remove()}),K.renderEnd("sunburst immediate"),k}var l,m,n,o,p={top:0,right:0,bottom:0,left:0},q=600,r=600,s="count",t={count:function(a){return 1},value:function(a){return a.value||a.size},size:function(a){return a.value||a.size}},u=Math.floor(1e4*Math.random()),v=null,w=a.utils.defaultColor(),x=!1,y=function(a){return"count"===s?a.name+" #"+a.value:a.name+" "+(a.value||a.size)},z=.02,A=function(a,b){return a.name>b.name},B=function(a,b){return a.name},C=!0,D=500,E=d3.dispatch("chartClick","elementClick","elementDblClick","elementMousemove","elementMouseover","elementMouseout","renderEnd"),F=d3.scale.linear().range([0,2*Math.PI]),G=d3.scale.sqrt(),H=d3.layout.partition().sort(A),I={},J=d3.svg.arc().startAngle(function(a){return Math.max(0,Math.min(2*Math.PI,F(a.x)))}).endAngle(function(a){return Math.max(0,Math.min(2*Math.PI,F(a.x+a.dx)))}).innerRadius(function(a){return Math.max(0,G(a.y))}).outerRadius(function(a){return Math.max(0,G(a.y+a.dy))}),K=a.utils.renderWatch(E);return k.dispatch=E,k.options=a.utils.optionsFunc.bind(k),k._options=Object.create({},{width:{get:function(){return q},set:function(a){q=a}},height:{get:function(){return r},set:function(a){r=a}},mode:{get:function(){return s},set:function(a){s=a}},id:{get:function(){return u},set:function(a){u=a}},duration:{get:function(){return D},set:function(a){D=a}},groupColorByParent:{get:function(){return C},set:function(a){C=!!a}},showLabels:{get:function(){return x},set:function(a){x=!!a}},labelFormat:{get:function(){return y},set:function(a){y=a}},labelThreshold:{get:function(){return z},set:function(a){z=a}},sort:{get:function(){return A},set:function(a){A=a}},key:{get:function(){return B},set:function(a){B=a}},margin:{get:function(){return p},set:function(a){p.top=void 0!=a.top?a.top:p.top,p.right=void 0!=a.right?a.right:p.right,p.bottom=void 0!=a.bottom?a.bottom:p.bottom,p.left=void 0!=a.left?a.left:p.left}},color:{get:function(){return w},set:function(b){w=a.utils.getColor(b)}}}),a.utils.initOptions(k),k},a.models.sunburstChart=function(){"use strict";function b(d){return n.reset(),n.models(c),d.each(function(d){var h=d3.select(this);a.utils.initSVG(h);var i=a.utils.availableWidth(f,h,e),j=a.utils.availableHeight(g,h,e);return b.update=function(){0===l?h.call(b):h.transition().duration(l).call(b)},b.container=h,d&&d.length?(h.selectAll(".nv-noData").remove(),c.width(i).height(j).margin(e),void 
h.call(c)):(a.utils.noData(b,h),b)}),n.renderEnd("sunburstChart immediate"),b}var c=a.models.sunburst(),d=a.models.tooltip(),e={top:30,right:20,bottom:20,left:20},f=null,g=null,h=a.utils.defaultColor(),i=!1,j=(Math.round(1e5*Math.random()),null),k=null,l=250,m=d3.dispatch("stateChange","changeState","renderEnd"),n=a.utils.renderWatch(m);return d.duration(0).headerEnabled(!1).valueFormatter(function(a){return a}),c.dispatch.on("elementMouseover.tooltip",function(a){a.series={key:a.data.name,value:a.data.value||a.data.size,color:a.color,percent:a.percent},i||(delete a.percent,delete a.series.percent),d.data(a).hidden(!1)}),c.dispatch.on("elementMouseout.tooltip",function(a){d.hidden(!0)}),c.dispatch.on("elementMousemove.tooltip",function(a){d()}),b.dispatch=m,b.sunburst=c,b.tooltip=d,b.options=a.utils.optionsFunc.bind(b),b._options=Object.create({},{noData:{get:function(){return k},set:function(a){k=a}},defaultState:{get:function(){return j},set:function(a){j=a}},showTooltipPercent:{get:function(){return i},set:function(a){i=a}},color:{get:function(){return h},set:function(a){h=a,c.color(h)}},duration:{get:function(){return l},set:function(a){l=a,n.reset(l),c.duration(l)}},margin:{get:function(){return e},set:function(a){e.top=void 0!==a.top?a.top:e.top,e.right=void 0!==a.right?a.right:e.right,e.bottom=void 0!==a.bottom?a.bottom:e.bottom,e.left=void 0!==a.left?a.left:e.left,c.margin(e)}}}),a.utils.inheritOptions(b,c),a.utils.initOptions(b),b},a.version="1.8.5"}();
+//# sourceMappingURL=nv.d3.min.js.map
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map
new file mode 100644
index 0000000..594da5a3
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/nvd3-1.8.5.min.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"nv.d3.min.js","sources":["../src/core.js","../src/dom.js","../src/interactiveLayer.js","../src/tooltip.js","../src/utils.js","../src/models/axis.js","../src/models/boxPlot.js","../src/models/boxPlotChart.js","../src/models/bullet.js","../src/models/bulletChart.js","../src/models/candlestickBar.js","../src/models/cumulativeLineChart.js","../src/models/discreteBar.js","../src/models/discreteBarChart.js","../src/models/distribution.js","../src/models/focus.js","../src/models/forceDirectedGraph.js","../src/models/furiousLegend.js","../src/models/historicalBar.js","../src/models/historicalBarChart.js","../src/models/legend.js","../src/models/line.js","../src/models/lineChart.js","../src/models/linePlusBarChart.js","../src/models/multiBar.js","../src/models/multiBarChart.js","../src/models/multiBarHorizontal.js","../src/models/multiBarHorizontalChart.js","../src/models/multiChart.js","../src/models/ohlcBar.js","../src/models/parallelCoordinates.js","../src/models/parallelCoordinatesChart.js","../src/models/pie.js","../src/models/pieChart.js","../src/models/sankey.js","../src/models/sankeyChart.js","../src/models/scatter.js","../src/models/scatterChart.js","../src/models/sparkline.js","../src/models/sparklinePlus.js","../src/models/stackedArea.js","../src/models/stackedAreaChart.js","../src/models/sunburst.js","../src/models/sunburstChart.js"],"names":["nv","dev","tooltip","utils","models","charts","logs","dom","d3","require","dispatch","Function","prototype","bind","oThis","this","TypeError","aArgs","Array","slice","call","arguments","fToBind","fNOP","fBound","apply","concat","on","e","startTime","Date","endTime","totalTime","log","window","console","length","deprecated","name","info","warn","render","step","active","render_start","renderLoop","chart","graph","i","queue","generate","callback","splice","setTimeout","render_end","addGraph","obj","push","module","exports","write","undefined","fastdom","mutate","read","measure","interactiveGuideline","layer","selection","each","data","mouseHandler","d3mouse","mouse","mouseX","mouseY","subtractMargin","mouseOutAnyReason","isMSIE","event","offsetX","offsetY","target","tagName","className","baseVal","match","margin","left","top","type","availableWidth","availableHeight","relatedTarget","ownerSVGElement","nvPointerEventsClass","elementMouseout","renderGuideLine","hidden","scaleIsOrdinal","xScale","rangeBands","pointXValue","elementIndex","bisect","range","rangeBand","domain","invert","elementMousemove","elementDblclick","elementClick","elementMouseDown","elementMouseUp","container","select","width","height","wrap","selectAll","wrapEnter","enter","append","attr","svgContainer","guideLine","x","showGuideLine","line","NaNtoZero","String","d","exit","remove","scale","linear","ActiveXObject","duration","hideDelay","_","interactiveBisect","values","searchVal","xAccessor","_xAccessor","_cmp","v","bisector","index","max","currentValue","nextIndex","min","nextValue","Math","abs","nearestValueIndex","threshold","yDistMax","Infinity","indexToHighlight","forEach","delta","initTooltip","node","document","body","id","classes","style","classed","nvtooltip","enabled","dataSeriesExists","newContent","contentGenerator","innerHTML","positionTooltip","floor","random","gravity","distance","snapDistance","lastPosition","headerEnabled","valueFormatter","headerFormatter","keyFormatter","table","createElement","theadEnter","html","value","tbodyEnter","trowEnter","p","series","highlight","color","total","key","filter","percent","format","opacityScale","opaci
ty","outerHTML","footer","position","pos","clientX","clientY","getComputedStyle","transform","client","getBoundingClientRect","isArray","isObject","calcGravityOffset","tmp","offsetHeight","offsetWidth","clientWidth","documentElement","clientHeight","gravityOffset","interrupt","transition","delay","old_translate","new_translate","round","translateInterpolator","interpolateString","is_hidden","styleTween","options","optionsFunc","_options","Object","create","get","set","chartContainer","fixedTop","offset","point","y","initOptions","windowSize","size","innerWidth","innerHeight","compatMode","a","isFunction","isDate","toString","isNumber","isNaN","windowResize","handler","addEventListener","clear","removeEventListener","getColor","defaultColor","color_scale","ordinal","category20","customTheme","dictionary","getKey","defaultColors","defIndex","pjax","links","content","load","href","fragment","parentNode","replaceChild","history","pushState","textContent","preventDefault","state","calcApproxTextWidth","svgTextElem","text","fontSize","parseInt","replace","textLength","n","watchTransition","renderWatch","args","_duration","renderStack","self","model","__rendered","m","arg","renderEnd","indexOf","reset","pop","every","deepExtend","dst","sources","source","srcObj","_setState","_getState","init","changed","getter","fn","setter","update","_set","settings","JSON","stringify","change","map","calcTicksX","numTicks","numValues","stream_len","calcTicksY","initOption","_calls","_overrides","ops","getOwnPropertyNames","calls","inheritOptionsD3","d3_source","oplist","_d3options","unshift","rebind","arrayUnique","sort","item","symbolMap","symbol","t","s","svg","symbolTypes","functor","inheritOptions","inherited","_inherited","d3ops","initSVG","nvd3-svg","sanitizeHeight","sanitizeWidth","bottom","right","noData","opt","noDataText","wrapTicks","word","words","split","reverse","lineNumber","lineHeight","dy","parseFloat","tspan","join","getComputedTextLength","arrayEquals","array1","array2","l","axis","g","ticks","orient","scale0","fmt","tickFormat","axisLabel","axisLabelText","xLabelMargin","axisMaxMin","w","isOrdinal","showMaxMin","tickPadding","axisLabelDistance","maxTextWidth","textHeight","xTicks","rotateLabelsRule","rotateLabels","box","sin","PI","staggerLabels","rotateYLabel","maxMinRange","err","copy","boxPlot","xDomain","getX","xRange","yData","yDomain","yMin","yMax","q1","getQ1","q3","getQ3","wl","getWl","wh","getWh","olItems","getOlItems","getOlValue","yScale","yRange","xScale0","yScale0","boxplots","boxEnter","j","hover","f","box_width","maxBoxWidth","box_left","box_right","endpoint","elementMouseover","getQ2","outliers","getOlColor","getOlLabel","label","Q1","Q2","Q3","whisker_low","whisker_high","q2","itemColor","outlierValue","outlierLabel","outlierColor","boxPlotChart","boxplot","showXAxis","xAxis","showYAxis","yAxis","beforeUpdate","clamp","gEnter","defsEnter","rightAlignYAxis","barsWrap","datum","disabled","tickSize","evt","tooltipContent","bullet","sortLabels","labels","lz","b","iA","iB","descending","rangez","ranges","markerz","markers","markerLinez","markerLines","measurez","measures","rangeLabelz","rangeLabels","markerLabelz","markerLabels","markerLineLabelz","markerLineLabels","measureLabelz","measureLabels","x1","extent","merge","forceX","__chart__","il","rangeClassNames","legacyRangeClassNames","w1","xp1","h3","markerData","marker","markerLinesData","defaultRangeLabels","bulletChart","x0","title","subtitle","bulletWrap","tick","tickEnter","tickUpdate","timer","flush","candlestickBar","barW
idth","padData","getLow","forceY","getHigh","chartClick","clipEdge","tickGroups","getOpen","getClose","getY","open","close","high","low","interactive","highlightPoint","pointIndex","isHoverOver","clearHighlights","cumulativeLineChart","lines","dragStart","dragMove","dx","updateZero","dragEnd","stateChange","indexLine","oldDuration","stateSetter","stateGetter","defaultState","indexDrag","behavior","drag","rescaleY","seriesDomains","initialDomain","completeDomain","indexify","interactivePointerEvents","showLegend","legend","marginTop","showControls","controlsData","controls","rightAlign","tempDisabled","useInteractiveGuideline","interactiveLayer","display","linesWrap","seriesIndex","avgLineData","average","avgLines","getAvgLineY","yVal","_ticks","newState","singlePoint","pointXLocation","allData","yValue","domainExtent","xValue","idx","indexifyYGetter","indexValue","noErrorCheck","updateState","useVoronoi","discreteBar","seriesData","y0","showValues","groups","bars","barsEnter","element","stopPropagation","elementDblClick","valueFormat","rectClass","discreteBarChart","discretebar","wrapLabels","distribution","naxis","distWrap","dist","getData","focus","resizePath","updateBrushBG","brush","empty","brushExtent","brushBG","leftWidth","rightWidth","onBrush","shouldDispatch","contentWrap","syncBrushing","brushBGenter","gBrush","pointActive","interpolate","xTickFormat","yTickFormat","forceDirectedGraph","nodes","nodeFieldSet","Set","keys","add","force","layout","linkStrength","friction","linkDistance","linkDist","charge","theta","alpha","start","link","sqrt","radius","py","px","nodeColor","field","linkExtras","nodeExtras","furiousLegend","setTextColor","vers","expanded","disengaged","setBGColor","seriesShape","seriesEnter","property","seriesCheckbox","seriesText","legendMouseover","legendMouseout","legendClick","radioButtonMode","userDisabled","engaged","legendDblclick","versPadding","align","seriesWidths","legendText","maxKeyLength","trimmedKey","substring","nodeTextLength","Error","padding","seriesPerRow","legendWidth","columnWidths","k","reduce","prev","cur","array","xPositions","curX","ceil","xpos","ypos","newxpos","maxwidth","historicalBar","rval","historicalBarChart","bar_model","transitionDuration","tooltipHide","ohlcBarChart","ohlcBar","candlestickBarChart","setBGOpacity","insert","seriesBG","scatter","scatterWrap","strokeWidth","fillOpacity","areaPaths","isArea","area","defined","y1","linePaths","pointSize","pointDomain","lineChart","updateXAxis","updateYAxis","focusLinesWrap","disableTooltip","focusEnable","focusEnter","legendPosition","currentValues","pointYValue","defaultValueFormatter","yPos","x2Axis","y2Axis","focusHeight","focusShowAxisX","focusShowAxisY","focusMargin","lineWithFocusChart","linePlusBarChart","availableHeight2","x2","availableHeight1","bar","focusBarsWrap","dataBars","allDisabled","dataLines","dataLine","switchYAxisOrder","y1Axis","y2","barsOpacity","linesOpacity","y1Opacity","y2Opacity","margin2","y3","lines2","bars2","y4","series1","series2","contextEnter","legendXPosition","originalKey","legendRightAxisHint","legendLeftAxisHint","bars2Wrap","lines2Wrap","y3Axis","y4Axis","getBarsAxis","main","getLinesAxis","multiBar","nonStackableCount","hideable","stacked","parsed","stack","stackOffset","nonStackable","nonStackableSeries","posBase","negBase","groupSpacing","exitTransition","last_datalength","barColor","rgb","darker","barSelection","multiBarChart","multibar","controlWidth","controlLabels","grouped","getTranslate","staggerUp","staggerDown","totalInBetweenTicks","red
uceXTicks","multiBarHorizontal","valuePadding","getYerr","xerr","mid","path","yerr","showBarLabels","yErr","multiBarHorizontalChart","multiChart","mouseover_line","yaxis","yAxis2","yAxis1","mouseover_scatter","mouseover_stack","stack1","mouseover_bar","bars1","serieIndex","dataLines1","dataLines2","dataScatters1","dataScatters2","dataBars1","dataBars2","dataStack1","dataStack2","color_array","lines1","scatters1","scatters2","stack2","lines1Wrap","scatters1Wrap","bars1Wrap","stack1Wrap","scatters2Wrap","stack2Wrap","extraValue1","aVal","extraValue2","yScale1","yDomain1","yScale2","yDomain2","stackedArea","parallelCoordinates","enabledDimensions","displayMissingValuesline","axisWithUndefinedValues","newscale","missingValuesline","missingValueslineText","restoreBrush","visible","filters","brushDomain","dimension","hasOnlyNaN","oldDomainMaxValue","hasNaN","dimensions","updateTicks","brushstart","displayBrush","actives","dimensionNames","extents","foreground","isActive","brushend","hasActiveBrush","brushEnd","currentTicks","tickValues","dragging","__origin__","background","dimensionPosition","currentPosition","dimensionsOrder","newData","val","dataValues","dimensionData","rangePoints","onlyUndefinedValues","tension","lineTension","axisDrag","lineData","undefinedValuesLabel","dimensionsEnter","formerActive","activeChanged","dimensionFormats","parallelCoordinatesChart","originalPosition","parallelCoordinatesWrap","isSorted","nanValue","str","tp","dim","dd","pie","arcTween","endAngle","startAngle","donut","innerRadius","_current","arcs","arcsRadiusOuter","arcsRadiusInner","arcsRadius","outer","inner","donutRatio","growOnHover","g_pie","arcsOver","arc","outerRadius","arcOver","cornerRadius","padAngle","titleOffset","slices","pieLabels","ae","attrTween","showLabels","labelsArc","labelsOutside","group","labelSunbeamLayout","rotateAngle","centroid","labelLocationHash","avgHeight","avgWidth","createHashKey","coordinates","getSlicePercentage","center","labelThreshold","hashKey","labelType","pieLabelsOutside","donutLabelsOutside","labelFormat","pieChart","pieWrap","showTooltipPercent","sankey","computeNodeLinks","sourceLinks","targetLinks","computeNodeValues","sum","computeNodeBreadths","nextNodes","remainingNodes","nodeWidth","sinksRight","moveSinksRight","scaleNodeBreadths","kx","computeNodeDepths","iterations","initializeNodeDepth","ky","nodesByBreadth","nodePadding","relaxLeftToRight","weightedSource","sy","breadth","relaxRightToLeft","weightedTarget","ty","resolveCollisions","ascendingDepth","nest","sortKeys","ascending","entries","computeLinkDepths","ascendingSourceDepth","ascendingTargetDepth","relayout","xi","interpolateNumber","curvature","x3","linkPath","sankeyChart","dragmove","testData","isDataValid","dataAvailable","error","showError","linkTitle","origin","appendChild","nodeFillColor","nodeStrokeColor","nodeTitle","units","formatNumber","message","nodeStyle","fillColor","strokeColor","getCache","_cache","delCache","getDiffs","cache","diffs","hasOwnProperty","updateInteractiveLayer","needsUpdate","vertices","groupIndex","pX","pY","pointArray","bounds","geom","polygon","voronoi","clip","pointPaths","vPointPaths","showVoronoi","clipVoronoi","pointClips","clipRadius","mouseEventCallback","el","mDispatch","scrollTop","pageYOffset","scrollLeft","pageXOffset","relativePos","logScale","sizeDomain","getSize","padDataOuter","z","forceSize","sizeRange","_sizeRange_def","z0","scaleDiff","width0","height0","sizeDiff","pointBorderColor","points","getShape","titles","interactiveUpdateDelay","clearTimeout","
timeoutID","shape","notActive","pointScale","pointRange","forcePoint","pointShape","scatterChart","showDistX","distX","showDistY","distY","regWrap","regLine","intercept","slope","sparkline","paths","result","yValues","maxPoint","lastIndexOf","minPoint","currentPoint","showMinMaxPoints","showCurrentPoint","animate","sparklinePlus","updateValueLine","paused","hoverValue","hoverEnter","sparklineHover","getClosestIndex","closestIndex","sparklineWrap","showLastValue","valueWrap","rightAlignValue","alignValue","dataRaw","aseries","dataFiltered","order","out","zeroArea","areaMouseover","pageX","pageY","areaMouseout","areaClick","d3_stackedOffset_stackPercent","stackData","o","stackedAreaChart","currentFormat","oldYTickFormat","percentFormatter","stackedWrap","xAxisHeight","legendTop","metaKey","stream","stack_percent","controlOptions","requiredTop","valueSum","allNullValues","tooltipValue","showTotalInTooltip","stackedY0","stackedY","totalLabel","oldValueFormatter","stackedAreaWithFocusChart","sunburst","rotationToAvoidUpsideDown","centerAngle","computeCenterAngle","computeNodePercentage","labelThresholdMatched","arcTweenZoom","xd","yd","yr","arcTweenUpdate","ipo","dx0","dy0","updatePrevPosition","prevPositions","pP","storeRetrievePrevPositions","zoomClick","depth","arcText","getBBox","rotation","partition","modes","mode","cG","cGE","groupColorByParent","children","parent","count","d1","d2","sunburstChart"],"mappings":";;AAAA,YAEA,GAAIA,KAGJA,GAAGC,KAAM,EACTD,EAAGE,QAAUF,EAAGE,YAChBF,EAAGG,MAAQH,EAAGG,UACdH,EAAGI,OAASJ,EAAGI,WACfJ,EAAGK,UACHL,EAAGM,QACHN,EAAGO,OAGoB,mBAAb,SAAgD,mBAAd,UAA2C,mBAAR,MAC3EC,GAAKC,QAAO,OAGhBT,EAAGU,SAAWF,GAAGE,SAAQ,eAAiB,cAOrCC,SAASC,UAAUC,OACpBF,SAASC,UAAUC,KAAO,SAAUC,GAChC,GAAoB,kBAATC,MAEP,KAAM,IAAIC,WAAS,uEAGvB,IAAIC,GAAQC,MAAMN,UAAUO,MAAMC,KAAKC,UAAW,GAC9CC,EAAUP,KACVQ,EAAO,aACPC,EAAS,WACL,MAAOF,GAAQG,MAAMV,eAAgBQ,IAAQT,EACnCC,KACAD,EACNG,EAAMS,OAAOR,MAAMN,UAAUO,MAAMC,KAAKC,aAKpD,OAFAE,GAAKX,UAAYG,KAAKH,UACtBY,EAAOZ,UAAY,GAAIW,GAChBC,IAKXxB,EAAGC,MACHD,EAAGU,SAASiB,GAAE,eAAiB,SAASC,GACpC5B,EAAGM,KAAKuB,WAAa,GAAIC,QAG7B9B,EAAGU,SAASiB,GAAE,aAAe,SAASC,GAClC5B,EAAGM,KAAKyB,SAAW,GAAID,MACvB9B,EAAGM,KAAK0B,UAAYhC,EAAGM,KAAKyB,QAAU/B,EAAGM,KAAKuB,UAC9C7B,EAAGiC,IAAG,QAAUjC,EAAGM,KAAK0B,cAQhChC,EAAGiC,IAAM,WACL,GAAIjC,EAAGC,KAAOiC,OAAOC,SAAWA,QAAQF,KAAOE,QAAQF,IAAIR,MACvDU,QAAQF,IAAIR,MAAMU,QAASd,eAC1B,IAAIrB,EAAGC,KAAOiC,OAAOC,SAAiC,kBAAfA,SAAQF,KAAqBtB,SAASC,UAAUC,KAAM,CAC9F,GAAIoB,GAAMtB,SAASC,UAAUC,KAAKO,KAAKe,QAAQF,IAAKE,QACpDF,GAAIR,MAAMU,QAASd,WAEvB,MAAOA,WAAUA,UAAUe,OAAS,IAIxCpC,EAAGqC,WAAa,SAASC,EAAMC,GACvBJ,SAAWA,QAAQK,MACnBL,QAAQK,KAAI,kBAAqBF,EAAO,0BAA2BC,GAAQ,KAOnFvC,EAAGyC,OAAS,SAAgBC,GAExBA,EAAOA,GAAQ,EAEf1C,EAAGyC,OAAOE,QAAS,EACnB3C,EAAGU,SAASkC,cAEZ,IAAIC,GAAa,WAGb,IAAK,GAFDC,GAAOC,EAEFC,EAAI,EAAON,EAAJM,IAAaD,EAAQ/C,EAAGyC,OAAOQ,MAAMD,IAAKA,IACtDF,EAAQC,EAAMG,iBACHH,GAAMI,gBAAkB,WAAYJ,EAAMI,SAASL,EAGlE9C,GAAGyC,OAAOQ,MAAMG,OAAO,EAAGJ,GAEtBhD,EAAGyC,OAAOQ,MAAMb,OAChBiB,WAAWR,IAGX7C,EAAGU,SAAS4C,aACZtD,EAAGyC,OAAOE,QAAS,GAI3BU,YAAWR,IAGf7C,EAAGyC,OAAOE,QAAS,EACnB3C,EAAGyC,OAAOQ,SAmBVjD,EAAGuD,SAAW,SAASC,SACRnC,WAAU,UAAa,YAC9BmC,GAAON,SAAU7B,UAAU,GAAI8B,SAAU9B,UAAU,KAGvDrB,EAAGyC,OAAOQ,MAAMQ,KAAKD,GAEhBxD,EAAGyC,OAAOE,QACX3C,EAAGyC,UAKY,mBAAb,SAAgD,mBAAd,WAC1CiB,OAAOC,QAAU3D,GAGI,mBAAb,UACRkC,OAAOlC,GAAKA,GClJdA,EAAGO,IAAIqD,MAAQ,SAAST,GACvB,MAAuBU,UAAnB3B,OAAO4B,QACHA,QAAQC,OAAOZ,GAEhBA,KASRnD,EAAGO,IAAIyD,KAAO,SAASb,GACtB,MAAuBU,UAAnB3B,OAAO4B,QACHA,QAAQG,QAAQd,GAEjBA,KCfRnD,EAAGkE,qBAAuB,WACtB,YAkBA,SAASC,GAAMC,GACXA,EAAUC,KAAK,SAASC,GAapB,QAASC,KACL,GAA
IC,GAAUhE,GAAGiE,MAAM1D,MACnB2D,EAASF,EAAQ,GACjBG,EAASH,EAAQ,GACjBI,GAAiB,EACjBC,GAAoB,CAuCxB,IAtCIC,IAQAJ,EAASlE,GAAGuE,MAAMC,QAClBL,EAASnE,GAAGuE,MAAME,QAWa,QAA5BzE,GAAGuE,MAAMG,OAAOC,UACfP,GAAiB,GAGjBpE,GAAGuE,MAAMG,OAAOE,UAAUC,QAAQC,MAAK,eACvCT,GAAoB,IAKzBD,IACCF,GAAUa,EAAOC,KACjBb,GAAUY,EAAOE,KAMC,aAAlBjF,GAAGuE,MAAMW,MACG,EAAThB,GAAuB,EAATC,GACdD,EAASiB,GAAkBhB,EAASiB,GACnCpF,GAAGuE,MAAMc,eAA4DhC,SAA3CrD,GAAGuE,MAAMc,cAAcC,iBAClDjB,EACD,CAEF,GAAIC,GACItE,GAAGuE,MAAMc,eACqChC,SAA3CrD,GAAGuE,MAAMc,cAAcC,kBACejC,SAArCrD,GAAGuE,MAAMc,cAAcT,WACpB5E,GAAGuE,MAAMc,cAAcT,UAAUE,MAAMpF,EAAQ6F,uBAEtD,MASR,OANArF,GAASsF,iBACLtB,OAAQA,EACRC,OAAQA,IAEZR,EAAM8B,gBAAgB,UACtB/F,GAAQgG,QAAO,GAGfhG,EAAQgG,QAAO,EAInB,IAAIC,GAA8C,kBAAtBC,GAAOC,WAC/BC,EAAczC,MAGlB,IAAIsC,EAAgB,CAChB,GAAII,GAAe/F,GAAGgG,OAAOJ,EAAOK,QAAS/B,GAAU,CAEvD,MAAI0B,EAAOK,QAAQF,GAAgBH,EAAOM,aAAehC,GAUrD,MANAhE,GAASsF,iBACLtB,OAAQA,EACRC,OAAQA,IAEZR,EAAM8B,gBAAgB,UACtB/F,GAAQgG,QAAO,EARfI,GAAcF,EAAOO,SAASnG,GAAGgG,OAAOJ,EAAOK,QAAS/B,GAAU,OAatE4B,GAAcF,EAAOQ,OAAOlC,EAGhChE,GAASmG,kBACLnC,OAAQA,EACRC,OAAQA,EACR2B,YAAaA,IAIK,aAAlB9F,GAAGuE,MAAMW,MACThF,EAASoG,iBACLpC,OAAQA,EACRC,OAAQA,EACR2B,YAAaA,IAKC,UAAlB9F,GAAGuE,MAAMW,MACThF,EAASqG,cACLrC,OAAQA,EACRC,OAAQA,EACR2B,YAAaA,IAKC,cAAlB9F,GAAGuE,MAAMW,MACZhF,EAASsG,kBACRtC,OAAQA,EACRC,OAAQA,EACR2B,YAAaA,IAKO,YAAlB9F,GAAGuE,MAAMW,MACZhF,EAASuG,gBACRvC,OAAQA,EACRC,OAAQA,EACR2B,YAAaA,IAlJnB,GAAIY,GAAY1G,GAAG2G,OAAOpG,MACtB4E,EAAkByB,GAAS,IAAMxB,EAAmByB,GAAU,IAC9DC,EAAOJ,EAAUK,UAAS,qCACzBjD,MAAMA,IACPkD,EAAYF,EAAKG,QAChBC,OAAM,KAAMC,KAAI,QAAU,mCAC/BH,GAAUE,OAAM,KAAMC,KAAI,QAAO,2BAE5BC,IA+ILA,EACKjG,GAAE,YAAa4C,GACf5C,GAAE,YAAa4C,GAAc,GAC7B5C,GAAE,WAAa4C,GAAa,GAC5B5C,GAAE,YAAc4C,GAAa,GAC7B5C,GAAE,UAAY4C,GAAa,GAC3B5C,GAAE,WAAa4C,GACf5C,GAAE,QAAU4C,GAGjBJ,EAAM0D,UAAY,KAElB1D,EAAM8B,gBAAkB,SAAS6B,GACxBC,IACD5D,EAAM0D,WAAa1D,EAAM0D,UAAUF,KAAI,QAAWG,GACtD9H,EAAGO,IAAIqD,MAAM,WACT,GAAIoE,GAAOV,EAAKH,OAAM,4BACjBI,UAAS,QACTjD,KAAW,MAALwD,GAAc9H,EAAGG,MAAM8H,UAAUH,OAAUI,OACtDF,GAAKP,QACAC,OAAM,QACNC,KAAI,QAAU,gBACdA,KAAI,KAAO,SAASQ,GAAK,MAAOA,KAChCR,KAAI,KAAO,SAASQ,GAAK,MAAOA,KAChCR,KAAI,KAAO/B,GACX+B,KAAI,KAAM,GACfK,EAAKI,OAAOC,gBAnM5B,GAAI9C,IAAWC,KAAM,EAAGC,IAAK,GACrB2B,EAAQ,KACRC,EAAS,KACTjB,EAAS5F,GAAG8H,MAAMC,SAClB7H,EAAWF,GAAGE,SAAQ,mBAAqB,kBAAmB,eAAgB,kBAAmB,mBAAoB,kBACrHqH,GAAgB,EAChBH,EAAe,KACf1H,EAAUF,EAAGI,OAAOF,UACpB4E,EAAU5C,OAAOsG,aAyOzB,OAtOAtI,GACKuI,SAAS,GACTC,UAAU,GACVxC,QAAO,GA2LZ/B,EAAMzD,SAAWA,EACjByD,EAAMjE,QAAUA,EAEhBiE,EAAMoB,OAAS,SAASoD,GACpB,MAAKtH,WAAUe,QACfmD,EAAOE,IAA4B,mBAAZkD,GAAElD,IAAwBkD,EAAElD,IAASF,EAAOE,IACnEF,EAAOC,KAA4B,mBAAZmD,GAAEnD,KAAwBmD,EAAEnD,KAASD,EAAOC,KAC5DrB,GAHuBoB,GAMlCpB,EAAMiD,MAAQ,SAASuB,GACnB,MAAKtH,WAAUe,QACfgF,EAAQuB,EACDxE,GAFuBiD,GAKlCjD,EAAMkD,OAAS,SAASsB,GACpB,MAAKtH,WAAUe,QACfiF,EAASsB,EACFxE,GAFuBkD,GAKlClD,EAAMiC,OAAS,SAASuC,GACpB,MAAKtH,WAAUe,QACfgE,EAASuC,EACFxE,GAFuBiC,GAKlCjC,EAAM4D,cAAgB,SAASY,GAC3B,MAAKtH,WAAUe,QACf2F,EAAgBY,EACTxE,GAFuB4D,GAKlC5D,EAAMyD,aAAe,SAASe,GAC1B,MAAKtH,WAAUe,QACfwF,EAAee,EACRxE,GAFuByD,GAK3BzD,GAgBXnE,EAAG4I,kBAAoB,SAAUC,EAAQC,EAAWC,GAChD,YACA,MAAOF,YAAkB3H,QACrB,MAAO,KAEX,IAAI8H,EAEAA,GADqB,kBAAdD,GACM,SAASZ,GAClB,MAAOA,GAAEL,GAGAiB,CAEjB,IAAIE,GAAO,SAASd,EAAGe,GAUnB,MAAOF,GAAWb,GAAKe,GAGvB1C,EAAShG,GAAG2I,SAASF,GAAMzD,KAC3B4D,EAAQ5I,GAAG6I,KAAK,EAAG7C,EAAOqC,EAAOC,GAAa,IAC9CQ,EAAeN,EAAWH,EAAOO,GAMrC,IAJ4B,mBAAjBE,KACPA,EAAeF,GAGfE,IAAiBR,EACjB,MAAOM,EAGX,IAAIG,GAAY/I,GAAGgJ,KAAKJ,EAAM,EAAGP,EAAOzG,OAAS,IAC7CqH,EAAYT,EAAWH,EAAOU,GAMlC,OAJyB,mBAAdE,KACPA,EAAYF,GAGZG,KAAKC,IAAIF,EAAYX,IAAcY,KAAKC,IAAIL,EAAeR,
GACpDM,EAEAG,GASfvJ,EAAG4J,kBAAoB,SAAUf,EAAQC,EAAWe,GAChD,YACA,IAAIC,GAAWC,EAAAA,EAAUC,EAAmB,IAQ5C,OAPAnB,GAAOoB,QAAQ,SAAS9B,EAAEnF,GACtB,GAAIkH,GAAQR,KAAKC,IAAIb,EAAYX,EACvB,OAALA,GAAsB2B,GAATI,GAA6BL,EAARK,IACnCJ,EAAWI,EACXF,EAAmBhH,KAGpBgH,GCvUXhK,EAAGI,OAAOF,QAAU,WAChB,YAyPA,SAASiK,KACL,IAAKjK,IAAYA,EAAQkK,OAAQ,CAG7B,GAAI9F,IAAQ,EACZpE,GAAUM,GAAG2G,OAAOkD,SAASC,MAAMnD,OAAM,IAAKoD,GAAIjG,KAAKA,GAEvDpE,EAAQuH,QAAQC,OAAM,OACdC,KAAI,QAAU,cAAgB6C,EAAUA,EAAU,eAClD7C,KAAI,KAAO4C,GACXE,MAAK,MAAQ,GAAGA,MAAK,OAAS,GAC9BA,MAAK,UAAY,GACjBA,MAAK,WAAa,SAClBlD,UAAS,sBAAuBmD,QAAQ3E,GAAsB,GAC9D2E,QAAQ3E,GAAsB,GAEtC7F,EAAQkI,OAAOC,UAKvB,QAASsC,KACL,MAAKC,IACAC,EAAiBvG,IAEtBtE,EAAGO,IAAIqD,MAAM,WACTuG,GAIA,IAAIW,GAAaC,EAAiBzG,EAC9BwG,KACA5K,EAAQkK,OAAOY,UAAYF,GAG/BG,MAGGN,GAhBP,OAjQJ,GAAIJ,GAAK,aAAeb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UAChC7G,EAAO,KACP8G,EAAU,IACVC,EAAW,GACXC,EAAe,EACfd,EAAU,KACVtE,GAAS,EACTwC,EAAY,IACZxI,EAAU,KACVqL,GAAiB/F,KAAM,KAAMC,IAAK,MAClCmF,GAAU,EACVnC,EAAW,IACX+C,GAAgB,EAChBzF,EAAuB,yBAI3B0F,EAAiB,SAAStD,EAAGnF,GAC7B,MAAOmF,IAIPuD,EAAkB,SAASvD,GAC3B,MAAOA,IAGPwD,EAAe,SAASxD,EAAGnF,GAC3B,MAAOmF,IAKP4C,EAAmB,SAAS5C,GAC5B,GAAU,OAANA,EACA,MAAO,EAGX,IAAIyD,GAAQpL,GAAG2G,OAAOkD,SAASwB,cAAa,SAC5C,IAAIL,EAAe,CACf,GAAIM,GAAaF,EAAMrE,UAAS,SAC3BjD,MAAM6D,IACNV,QAAQC,OAAM,QAEnBoE,GAAWpE,OAAM,MACZA,OAAM,MACNC,KAAI,UAAY,GAChBD,OAAM,UACNgD,QAAO,WAAY,GACnBqB,KAAKL,EAAgBvD,EAAE6D,QAGhC,GAAIC,GAAaL,EAAMrE,UAAS,SAC3BjD,MAAM6D,IACNV,QAAQC,OAAM,SAEfwE,EAAYD,EAAW1E,UAAS,MAC3BjD,KAAK,SAAS6H,GAAK,MAAOA,GAAEC,SAC5B3E,QACAC,OAAM,MACNgD,QAAO,YAAc,SAASyB,GAAK,MAAOA,GAAEE,WAErDH,GAAUxE,OAAM,MACXgD,QAAO,sBAAsB,GAC7BhD,OAAM,OACN+C,MAAK,mBAAqB,SAAS0B,GAAK,MAAOA,GAAEG,QAEtDJ,EAAUxE,OAAM,MACXgD,QAAO,OAAO,GACdA,QAAO,QAAS,SAASyB,GAAK,QAASA,EAAEI,QACzCR,KAAK,SAASI,EAAGnJ,GAAK,MAAO2I,GAAaQ,EAAEK,IAAKxJ,KAEtDkJ,EAAUxE,OAAM,MACXgD,QAAO,SAAS,GAChBqB,KAAK,SAASI,EAAGnJ,GAAK,MAAOyI,GAAeU,EAAEH,MAAOhJ,KAE1DkJ,EAAUO,OAAO,SAAUN,EAAEnJ,GAAK,MAAqBa,UAAdsI,EAAEO,UAAyBhF,OAAM,MACrEgD,QAAO,WAAY,GACnBqB,KAAK,SAASI,EAAGnJ,GAAK,MAAO,IAAMxC,GAAGmM,OAAM,KAAMR,EAAEO,SAAW,MAEpER,EAAU3E,UAAS,MAAOlD,KAAK,SAAS8H,GACpC,GAAIA,EAAEE,UAAW,CACb,GAAIO,GAAepM,GAAG8H,MAAMC,SAAS5B,QAAQ,EAAE,IAAIF,OAAK,OAAS0F,EAAEG,QAC/DO,EAAU,EACdrM,IAAG2G,OAAOpG,MACL0J,MAAK,sBAAwBmC,EAAaC,IAC1CpC,MAAK,mBAAqBmC,EAAaC,MAKpD,IAAId,GAAOH,EAAMxB,OAAO0C,SAGxB,OAFiBjJ,UAAbsE,EAAE4E,SACFhB,GAAQ,uBAAyB5D,EAAE4E,OAAS,UACzChB,GAYPiB,EAAW,WACX,GAAIC,IACAzH,KAAmB,OAAbhF,GAAGuE,MAAiBvE,GAAGuE,MAAMmI,QAAU,EAC7CzH,IAAkB,OAAbjF,GAAGuE,MAAiBvE,GAAGuE,MAAMoI,QAAU,EAGhD,IAAgD,QAA7CC,iBAAiB/C,SAASC,MAAM+C,UAAqB,CAGpD,GAAIC,GAASjD,SAASC,KAAKiD,uBAC3BN,GAAIzH,MAAQ8H,EAAO9H,KACnByH,EAAIxH,KAAO6H,EAAO7H,IAGtB,MAAOwH,IAGPpC,EAAmB,SAAS1C,GAC5B,GAAIA,GAAKA,EAAEiE,OAAQ,CACf,GAAIpM,EAAGG,MAAMqN,QAAQrF,EAAEiE,QACnB,OAAO,CAGX,IAAIpM,EAAGG,MAAMsN,SAAStF,EAAEiE,QAEpB,MADAjE,GAAEiE,QAAUjE,EAAEiE,SACP,EAGf,OAAO,GAKPsB,EAAoB,SAAST,GAC7B,GAIIzH,GAAMC,EAAKkI,EAJXtG,EAASnH,EAAQkK,OAAOwD,aACxBxG,EAAQlH,EAAQkK,OAAOyD,YACvBC,EAAczD,SAAS0D,gBAAgBD,YACvCE,EAAe3D,SAAS0D,gBAAgBC,YAI5C,QAAQ5C,GACJ,IAAK,IACD5F,GAAS4B,EAAQiE,EACjB5F,IAAS4B,EAAS,GACf4F,EAAIzH,KAAOA,EAAO,IAAGA,EAAO6F,IAC3BsC,EAAMV,EAAIxH,IAAMA,GAAO,IAAGA,GAAOkI,IACjCA,EAAMV,EAAIxH,IAAMA,EAAM4B,GAAU2G,IAAcvI,GAAOkI,EAAMK,EAC/D,MACJ,KAAK,IACDxI,EAAO6F,EACP5F,IAAS4B,EAAS,GACd4F,EAAIzH,KAAOA,EAAO4B,EAAQ0G,IAAatI,GAAS4B,EAAQiE,IACvDsC,EAAMV,EAAIxH,IAAMA,GAAO,IAAGA,GAAOkI,IACjCA,EAAMV,EAAIxH,IAAMA,EAAM4B,GAAU2G,IAAcvI,GAAOkI,EAAMK,EAChE,MACJ,KAAK,IACDxI,IAAU4B,EAAQ,GAAK,EACvB3B,EAAM4F,EACF4B,EAAIxH,IAAMA,EAAM4B,EAAS2G,IAAcvI,GAAQ4B,EAASgE,IACvDsC,EAAMV,EAAIzH,KAA
OA,GAAQ,IAAGA,GAAQmI,IACpCA,EAAMV,EAAIzH,KAAOA,EAAO4B,GAAS0G,IAAatI,GAAQmI,EAAMG,EACjE,MACJ,KAAK,IACDtI,IAAU4B,EAAQ,GAClB3B,GAAQ4B,EAASgE,EACb4B,EAAIxH,IAAMA,EAAM,IAAGA,EAAM4F,IACxBsC,EAAMV,EAAIzH,KAAOA,GAAQ,IAAGA,GAAQmI,IACpCA,EAAMV,EAAIzH,KAAOA,EAAO4B,GAAS0G,IAAatI,GAAQmI,EAAMG,EACjE,MACJ,KAAK,SACDtI,IAAU4B,EAAQ,GAClB3B,IAAS4B,EAAS,EAClB,MACJ,SACI7B,EAAO,EACPC,EAAM,EAId,OAASD,KAAQA,EAAMC,IAAOA,IAM9BwF,EAAkB,WAClBjL,EAAGO,IAAIyD,KAAK,WACR,GAAIiJ,GAAMD,IACNiB,EAAgBP,EAAkBT,GAClCzH,EAAOyH,EAAIzH,KAAOyI,EAAczI,KAChCC,EAAMwH,EAAIxH,IAAMwI,EAAcxI,GAGlC,IAAIS,EACAhG,EACKgO,YACAC,aACAC,MAAM1F,GACND,SAAS,GACTgC,MAAK,UAAY,OACnB,CAEH,GAAI4D,GAAgB,aAAe9C,EAAa/F,KAAO,OAAS+F,EAAa9F,IAAM,MAC/E6I,EAAgB,aAAe5E,KAAK6E,MAAM/I,GAAQ,OAASkE,KAAK6E,MAAM9I,GAAO,MAC7E+I,EAAwBhO,GAAGiO,kBAAkBJ,EAAeC,GAC5DI,EAAYxO,EAAQuK,MAAK,WAAc,EAE3CvK,GACKgO,YACAC,aACA1F,SAASiG,EAAY,EAAIjG,GAEzBkG,WAAU,YAAc,SAAUxG,GAC/B,MAAOqG,IACR,aAEFG,WAAU,oBAAsB,SAAUxG,GACvC,MAAOqG,KAEV/D,MAAK,gBAAkB6D,GACvB7D,MAAK,UAAY,GAG1Bc,EAAa/F,KAAOA,EACpB+F,EAAa9F,IAAMA,IAuG3B,OAzDAkF,GAAU5E,qBAAuBA,EACjC4E,EAAUiE,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAK8J,GAE9CA,EAAUmE,SAAWC,OAAOC,WAExBvG,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GAAGF,EAASE,IACxEyC,SAAU6D,IAAK,WAAW,MAAO7D,IAAW8D,IAAK,SAASvG,GAAGyC,EAAQzC,IACrE0C,UAAW4D,IAAK,WAAW,MAAO5D,IAAY6D,IAAK,SAASvG,GAAG0C,EAAS1C,IACxE2C,cAAe2D,IAAK,WAAW,MAAO3D,IAAgB4D,IAAK,SAASvG,GAAG2C,EAAa3C,IACpF6B,SAAUyE,IAAK,WAAW,MAAOzE,IAAW0E,IAAK,SAASvG,GAAG6B,EAAQ7B,IACrEiC,SAAUqE,IAAK,WAAW,MAAOrE,IAAWsE,IAAK,SAASvG,GAAGiC,EAAQjC,IACrED,WAAYuG,IAAK,WAAW,MAAOvG,IAAawG,IAAK,SAASvG,GAAGD,EAAUC,IAC3EoC,kBAAmBkE,IAAK,WAAW,MAAOlE,IAAoBmE,IAAK,SAASvG,GAAGoC,EAAiBpC,IAChG8C,gBAAiBwD,IAAK,WAAW,MAAOxD,IAAkByD,IAAK,SAASvG,GAAG8C,EAAe9C,IAC1F+C,iBAAkBuD,IAAK,WAAW,MAAOvD,IAAmBwD,IAAK,SAASvG,GAAG+C,EAAgB/C,IAC7FgD,cAAesD,IAAK,WAAW,MAAOtD,IAAgBuD,IAAK,SAASvG,GAAGgD,EAAahD,IACpF6C,eAAgByD,IAAK,WAAW,MAAOzD,IAAiB0D,IAAK,SAASvG,GAAG6C,EAAc7C,IACvFqE,UAAWiC,IAAK,WAAW,MAAOjC,IAAYkC,IAAK,SAASvG,GAAGqE,EAASrE,IAGxEwG,gBAAiBF,IAAK,WAAW,MAAO5E,UAASC,MAAQ4E,IAAK,SAASvG,GAEnE3I,EAAGqC,WAAU,iBAAmB,iCAEpC+M,UAAWH,IAAK,WAAW,MAAO,OAAQC,IAAK,SAASvG,GAEpD3I,EAAGqC,WAAU,WAAa,iCAE9BgN,QAASJ,IAAK,WAAW,OAAQzJ,KAAM,EAAGC,IAAK,IAAMyJ,IAAK,SAASvG,GAE/D3I,EAAGqC,WAAU,SAAW,0CAI5B6D,QAAS+I,IAAK,WAAW,MAAO/I,IAAUgJ,IAAK,SAASvG,GAChDzC,GAAUyC,IACVzC,IAAWyC,EACXgC,OAGRrG,MAAO2K,IAAK,WAAW,MAAO3K,IAAQ4K,IAAK,SAASvG,GAE5CA,EAAE2G,QACF3G,EAAEqD,MAAQrD,EAAE2G,MAAMxH,EAClBa,EAAEyD,OAASzD,EAAEyD,WACbzD,EAAEyD,OAAOJ,MAAQrD,EAAE2G,MAAMC,EACzB5G,EAAEyD,OAAOE,MAAQ3D,EAAE2G,MAAMhD,OAAS3D,EAAEyD,OAAOE,OAE/ChI,EAAOqE,IAIXyB,MAAO6E,IAAK,WAAW,MAAO/O,GAAQkK,QAAU8E,IAAK,SAASvG,MAC9D4B,IAAK0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,QAGpD3I,EAAGG,MAAMqP,YAAY7E,GACdA,GC7VX3K,EAAGG,MAAMsP,WAAa,WAElB,GAAIC,IAAQtI,MAAO,IAAKC,OAAQ,IAGhC,OAAInF,QAAOyN,YAAczN,OAAO0N,aAC5BF,EAAKtI,MAAQlF,OAAOyN,WACpBD,EAAKrI,OAASnF,OAAO0N,YACd,GAIY,cAAnBvF,SAASwF,YACTxF,SAAS0D,iBACT1D,SAAS0D,gBAAgBF,aAEzB6B,EAAKtI,MAAQiD,SAAS0D,gBAAgBF,YACtC6B,EAAKrI,OAASgD,SAAS0D,gBAAgBH,aAChC,GAIPvD,SAASC,MAAQD,SAASC,KAAKuD,aAC/B6B,EAAKtI,MAAQiD,SAASC,KAAKuD,YAC3B6B,EAAKrI,OAASgD,SAASC,KAAKsD,aACrB,GAGJ,GAOX5N,EAAGG,MAAMqN,QAAUtM,MAAMsM,QACzBxN,EAAGG,MAAMsN,SAAW,SAASqC,GACzB,MAAa,QAANA,GAA2B,gBAANA,IAEhC9P,EAAGG,MAAM4P,WAAa,SAASD,GAC3B,MAAoB,kBAANA,IAElB9P,EAAGG,MAAM6P,OAAS,SAASF,GACvB,MAA4B,kBAArBG,SAAS7O,KAAK0O,IAEzB9P,EAAGG,MAAM+P,SAAW,SAASJ,GACzB,OAAQK,MAAML,IAAmB,gBAANA,IAO/B9P,EAAGG,MAAMiQ,aAAe,SAASC,GAO7B,MANInO,QAAOoO,iBACPpO,OAAOoO,iBAAgB,SAAWD,GAElCrQ,EAAGiC,IAAG,gDAAkDoO,IAIxDlN,SAAUkN,EACVE,MAAO,WACHrO,OAAOsO,oBAAmB,SAAWH,
MAYjDrQ,EAAGG,MAAMsQ,SAAW,SAASnE,GAEzB,GAAczI,SAAVyI,EACA,MAAOtM,GAAGG,MAAMuQ,cAGb,IAAG1Q,EAAGG,MAAMqN,QAAQlB,GAAQ,CAC/B,GAAIqE,GAAcnQ,GAAG8H,MAAMsI,UAAUnK,MAAM6F,EAC3C,OAAO,UAASnE,EAAGnF,GACf,GAAIwJ,GAAY3I,SAANb,EAAkBmF,EAAInF,CAChC,OAAOmF,GAAEmE,OAASqE,EAAYnE,IAOlC,MAAOF,IASftM,EAAGG,MAAMuQ,aAAe,WAEpB,MAAO1Q,GAAGG,MAAMsQ,SAASjQ,GAAG8H,MAAMuI,aAAapK,UAQnDzG,EAAGG,MAAM2Q,YAAc,SAASC,EAAYC,EAAQC,GAEhDD,EAASA,GAAU,SAAS5E,GAAU,MAAOA,GAAOI,KACpDyE,EAAgBA,GAAiBzQ,GAAG8H,MAAMuI,aAAapK,OAGvD,IAAIyK,GAAWD,EAAc7O,MAE7B,OAAO,UAASgK,EAAQhD,GACpB,GAAIoD,GAAMwE,EAAO5E,EACjB,OAAIpM,GAAGG,MAAM4P,WAAWgB,EAAWvE,IACxBuE,EAAWvE,KACS3I,SAApBkN,EAAWvE,GACXuE,EAAWvE,IAGb0E,IAEDA,EAAWD,EAAc7O,QAE7B8O,GAAsB,EACfD,EAAcC,MAWjClR,EAAGG,MAAMgR,KAAO,SAASC,EAAOC,GAE5B,GAAIC,GAAO,SAASC,GAChB/Q,GAAGuL,KAAKwF,EAAM,SAASC,GACnB,GAAItM,GAAS1E,GAAG2G,OAAOkK,GAASjH,MAChClF,GAAOuM,WAAWC,aACdlR,GAAG2G,OAAOqK,GAAUrK,OAAOkK,GAASjH,OACpClF,GACJlF,EAAGG,MAAMgR,KAAKC,EAAOC,KAI7B7Q,IAAG+G,UAAU6J,GAAOzP,GAAE,QAAU,WAC5BgQ,QAAQC,UAAU7Q,KAAKwQ,KAAMxQ,KAAK8Q,YAAa9Q,KAAKwQ,MACpDD,EAAKvQ,KAAKwQ,MACV/Q,GAAGuE,MAAM+M,mBAGbtR,GAAG2G,OAAOjF,QAAQP,GAAE,WAAa,WACzBnB,GAAGuE,MAAMgN,OACTT,EAAK9Q,GAAGuE,MAAMgN,UAW1B/R,EAAGG,MAAM6R,oBAAsB,SAAUC,GACrC,GAAIjS,EAAGG,MAAM4P,WAAWkC,EAAYxH,QAAUzK,EAAGG,MAAM4P,WAAWkC,EAAYC,MAAO,CACjF,GAAIC,GAAWC,SAASH,EAAYxH,MAAK,aAAc4H,QAAO,KAAI,IAAO,IACrEC,EAAaL,EAAYC,OAAO9P,MACpC,OAAOpC,GAAGG,MAAM8H,UAAUqK,EAAaH,EAAW,IAEtD,MAAO,IAOXnS,EAAGG,MAAM8H,UAAY,SAASsK,GAC1B,OAAKvS,EAAGG,MAAM+P,SAASqC,IAChBpC,MAAMoC,IACA,OAANA,GACAA,IAAMxI,EAAAA,GACNwI,MAAOxI,EAAAA,GAEH,EAEJwI,GAMX/R,GAAG4D,UAAUxD,UAAU4R,gBAAkB,SAASC,GAC9C,GAAIC,IAAQ3R,MAAMW,UAAUP,MAAMC,KAAKC,UAAW,GAClD,OAAOoR,GAAYtE,WAAW1M,MAAMgR,EAAaC,IAOrD1S,EAAGG,MAAMsS,YAAc,SAAS/R,EAAU+H,GACtC,KAAM1H,eAAgBf,GAAGG,MAAMsS,aAC3B,MAAO,IAAIzS,GAAGG,MAAMsS,YAAY/R,EAAU+H,EAG9C,IAAIkK,GAAyB9O,SAAb4E,EAAyBA,EAAW,IAChDmK,KACAC,EAAO9R,IAEXA,MAAKX,OAAS,SAASA,GAevB,MAdIA,MAAYe,MAAMC,KAAKC,UAAW,GAClCjB,EAAO6J,QAAQ,SAAS6I,GACpBA,EAAMC,YAAa,EACnB,SAAUC,GACNA,EAAEtS,SAASiB,GAAE,YAAc,SAASsR,GAChCD,EAAED,YAAa,EACfF,EAAKK,UAAS,YAEnBJ,GAECF,EAAYO,QAAQL,GAAS,GAC7BF,EAAYnP,KAAKqP,KAGtB/R,MAGPA,KAAKqS,MAAQ,SAAS3K,GACD5E,SAAb4E,IACAkK,EAAYlK,GAEhBmK,MAGJ7R,KAAKoN,WAAa,SAAS/J,EAAWsO,EAAMjK,GAcxC,GAbAiK,EAAOrR,UAAUe,OAAS,KAAOjB,MAAMC,KAAKC,UAAW,MAGnDoH,EADAiK,EAAKtQ,OAAS,EACHsQ,EAAKW,MAESxP,SAAd8O,EAA0BA,EAAY,IAErDvO,EAAU2O,YAAa,EAEnBH,EAAYO,QAAQ/O,GAAa,GACjCwO,EAAYnP,KAAKW,GAGJ,IAAbqE,EAIA,MAHArE,GAAU2O,YAAa,EACvB3O,EAAUgK,MAAQ,WAAa,MAAOrN,OACtCqD,EAAUqE,SAAW,WAAa,MAAO1H,OAClCqD,CAEkB,KAArBA,EAAUhC,OACVgC,EAAU2O,YAAa,EAChB3O,EAAUkP,MAAO,SAASnL,GAAI,OAAQA,EAAE/F,SAC/CgC,EAAU2O,YAAa,EAEvB3O,EAAU2O,YAAa,CAG3B,IAAIR,GAAI,CACR,OAAOnO,GACF+J,aACA1F,SAASA,GACTpE,KAAK,aAAckO,IACnBlO,KAAI,MAAQ,SAAS8D,EAAGnF,GACT,MAANuP,IACFnO,EAAU2O,YAAa,EACvBF,EAAKK,UAAUzR,MAAMV,KAAM2R,OAM/C3R,KAAKmS,UAAY,WACTN,EAAYU,MAAO,SAASnL,GAAI,MAAOA,GAAE4K,eACzCH,EAAY3I,QAAS,SAAS9B,GAAIA,EAAE4K,YAAa,IACjDrS,EAASwS,UAAUzR,MAAMV,KAAMM,cAY3CrB,EAAGG,MAAMoT,WAAa,SAASC,GAC3B,GAAIC,GAAUpS,UAAUe,OAAS,KAAOjB,MAAMC,KAAKC,UAAW,KAC9DoS,GAAQxJ,QAAQ,SAASyJ,GACrB,IAAK,GAAIlH,KAAOkH,GAAQ,CACpB,GAAIlG,GAAUxN,EAAGG,MAAMqN,QAAQgG,EAAIhH,IAC/BiB,EAAWzN,EAAGG,MAAMsN,SAAS+F,EAAIhH,IACjCmH,EAAS3T,EAAGG,MAAMsN,SAASiG,EAAOlH,GAElCiB,KAAaD,GAAWmG,EACxB3T,EAAGG,MAAMoT,WAAWC,EAAIhH,GAAMkH,EAAOlH,IAErCgH,EAAIhH,GAAOkH,EAAOlH,OAUlCxM,EAAGG,MAAM4R,MAAQ,WACb,KAAMhR,eAAgBf,GAAGG,MAAM4R,OAC3B,MAAO,IAAI/R,GAAGG,MAAM4R,KAExB,IAAIA,MAEA6B,EAAY,aACZC,EAAY,WAAY,UACxBC,EAAO,KACPC,EAAU,IAEdhT,MAAKL,SAAWF,GAAGE,SAAQ,SAAW,OAEtCK,KAAKL,SAASiB,GAAE,MAAQ,SAASoQ,GAC7B6B,EAAU7B,GAAO,KA
GrBhR,KAAKiT,OAAS,SAASC,GAEnB,MADAJ,GAAYI,EACLlT,MAGXA,KAAKmT,OAAS,SAASD,EAAI9Q,GAUvB,MATKA,KACDA,EAAW,cAEfyQ,EAAY,SAAS7B,EAAOoC,GACxBF,EAAGlC,GACCoC,GACAhR,KAGDpC,MAGXA,KAAK+S,KAAO,SAAS/B,GACjB+B,EAAOA,MACP9T,EAAGG,MAAMoT,WAAWO,EAAM/B,GAG9B,IAAIqC,GAAO,WACP,GAAIC,GAAWR,GAEf,IAAIS,KAAKC,UAAUF,KAAcC,KAAKC,UAAUxC,GAC5C,OAAO,CAGX,KAAK,GAAIvF,KAAO6H,GACOxQ,SAAfkO,EAAMvF,KACNuF,EAAMvF,OAEVuF,EAAMvF,GAAO6H,EAAS7H,GACtBuH,GAAU,CAEd,QAAO,EAGXhT,MAAKoT,OAAS,WACNL,IACAF,EAAUE,GAAM,GAChBA,EAAO,MAEPM,EAAKhT,KAAKL,OACVA,KAAKL,SAAS8T,OAAOzC,KAkBjC/R,EAAGG,MAAM0O,YAAc,SAAS6D,GAQ5B,MAPIA,IACAlS,GAAGiU,IAAI/B,GAAMzI,QAAO,SAAWuC,EAAIR,GAC3BhM,EAAGG,MAAM4P,WAAWhP,KAAKyL,KACzBzL,KAAKyL,GAAKR,IAEfnL,KAAKE,OAELA,MAWXf,EAAGG,MAAMuU,WAAa,SAASC,EAAUrQ,GAErC,GAAIsQ,GAAY,EACZ5R,EAAI,CACR,KAAKA,EAAGA,EAAIsB,EAAKlC,OAAQY,GAAK,EAAG,CAC7B,GAAI6R,GAAavQ,EAAKtB,IAAMsB,EAAKtB,GAAG6F,OAASvE,EAAKtB,GAAG6F,OAAOzG,OAAS,CACrEwS,GAAYC,EAAaD,EAAYC,EAAaD,EAWtD,MATA5U,GAAGiC,IAAG,8BAAgC0S,GACtC3U,EAAGiC,IAAG,gCAAkC2S,GAExCD,EAAWA,EAAWC,EAAYD,EAAWC,EAAY,EAAID,EAE7DA,EAAsB,EAAXA,EAAe,EAAIA,EAE9BA,EAAWjL,KAAKwB,MAAMyJ,GACtB3U,EAAGiC,IAAG,8BAAgC0S,GAC/BA,GAOX3U,EAAGG,MAAM2U,WAAa,SAASH,EAAUrQ,GAErC,MAAOtE,GAAGG,MAAMuU,WAAWC,EAAUrQ,IAYzCtE,EAAGG,MAAM4U,WAAa,SAASjS,EAAOR,GAE9BQ,EAAMkS,QAAUlS,EAAMkS,OAAO1S,GAC7BQ,EAAMR,GAAQQ,EAAMkS,OAAO1S,IAE3BQ,EAAMR,GAAQ,SAAUqG,GACpB,MAAKtH,WAAUe,QACfU,EAAMmS,WAAW3S,IAAQ,EACzBQ,EAAMgM,SAASxM,GAAQqG,EAChB7F,GAHuBA,EAAMgM,SAASxM,IAOjDQ,EAAK,IAAOR,GAAQ,SAASqG,GACzB,MAAKtH,WAAUe,QACVU,EAAMmS,WAAW3S,KAClBQ,EAAMgM,SAASxM,GAAQqG,GAEpB7F,GAJuBA,EAAMgM,SAASxM,MAazDtC,EAAGG,MAAMqP,YAAc,SAAS1M,GAC5BA,EAAMmS,WAAanS,EAAMmS,cACzB,IAAIC,GAAMnG,OAAOoG,oBAAoBrS,EAAMgM,cACvCsG,EAAQrG,OAAOoG,oBAAoBrS,EAAMkS,WAC7CE,GAAMA,EAAIxT,OAAO0T,EACjB,KAAK,GAAIpS,KAAKkS,GACVlV,EAAGG,MAAM4U,WAAWjS,EAAOoS,EAAIlS,KAUvChD,EAAGG,MAAMkV,iBAAmB,SAASnQ,EAAQoQ,EAAWC,GACpDrQ,EAAOsQ,WAAaD,EAAO7T,OAAOwD,EAAOsQ,gBACzCD,EAAOE,QAAQH,GACfC,EAAOE,QAAQvQ,GACf1E,GAAGkV,OAAOjU,MAAMV,KAAMwU,IAO1BvV,EAAGG,MAAMwV,YAAc,SAAS7F,GAC5B,MAAOA,GAAE8F,OAAOnJ,OAAO,SAASoJ,EAAM5I,GAClC,OAAQA,GAAO4I,GAAQ/F,EAAE7C,EAAM,MAUvCjN,EAAGG,MAAM2V,UAAYtV,GAAGiU,MAMxBzU,EAAGG,MAAM4V,OAAS,WAGd,QAASA,GAAO5N,EAAEnF,GACd,GAAIgT,GAAItQ,EAAKtE,KAAKL,KAAKoH,EAAEnF,GACrBiT,EAAIvG,EAAKtO,KAAKL,KAAKoH,EAAEnF,EACzB,OAAsC,KAAlCxC,GAAG0V,IAAIC,YAAYhD,QAAQ6C,GACpBxV,GAAG0V,IAAIH,SAASrQ,KAAKsQ,GAAGtG,KAAKuG,KAE7BjW,EAAGG,MAAM2V,UAAU7G,IAAI+G,GAAGC,GARzC,GAAIvQ,GACAgK,EAAO,EAoBX,OAVAqG,GAAOrQ,KAAO,SAASiD,GACnB,MAAKtH,WAAUe,QACfsD,EAAOlF,GAAG4V,QAAQzN,GACXoN,GAFuBrQ,GAIlCqQ,EAAOrG,KAAO,SAAS/G,GACnB,MAAKtH,WAAUe,QACfsN,EAAOlP,GAAG4V,QAAQzN,GACXoN,GAFuBrG,GAI3BqG,GAUX/V,EAAGG,MAAMkW,eAAiB,SAASnR,EAAQwO,GAEvC,GAAIwB,GAAMnG,OAAOoG,oBAAoBzB,EAAO5E,cACxCsG,EAAQrG,OAAOoG,oBAAoBzB,EAAOsB,YAC1CsB,EAAY5C,EAAO6C,eACnBC,EAAQ9C,EAAO8B,eACf9C,EAAOwC,EAAIxT,OAAO0T,GAAO1T,OAAO4U,GAAW5U,OAAO8U,EACtD9D,GAAK+C,QAAQ/B,GACbhB,EAAK+C,QAAQvQ,GACb1E,GAAGkV,OAAOjU,MAAMV,KAAM2R,GAEtBxN,EAAOqR,WAAavW,EAAGG,MAAMwV,YAAYT,EAAIxT,OAAO0T,GAAO1T,OAAO4U,GAAW5U,OAAOwT,GAAKxT,OAAOwD,EAAOqR,iBACvGrR,EAAOsQ,WAAaxV,EAAGG,MAAMwV,YAAYa,EAAM9U,OAAOwD,EAAOsQ,kBAOjExV,EAAGG,MAAMsW,QAAU,SAASP,GACxBA,EAAIxL,SAAOgM,YAAa,KAO5B1W,EAAGG,MAAMwW,eAAiB,SAAStP,EAAQH,GACvC,MAAQG,IAAU+K,SAASlL,EAAUuD,MAAK,UAAY,KAAO,KAOjEzK,EAAGG,MAAMyW,cAAgB,SAASxP,EAAOF,GACrC,MAAQE,IAASgL,SAASlL,EAAUuD,MAAK,SAAW,KAAO,KAO/DzK,EAAGG,MAAMyF,gBAAkB,SAASyB,EAAQH,EAAW3B,GACnD,MAAOmE,MAAKL,IAAI,EAAErJ,EAAGG,MAAMwW,eAAetP,EAAQH,GAAa3B,EAAOE,IAAMF,EAAOsR,SAMvF7W,EAAGG,MAAMwF,eAAiB,SAASyB,EAAOF,EAAW3B,GACjD,MAAOmE,MAAKL,IAAI,EAAErJ,EAAGG,MAAMyW,cAAcxP,EAAOF,GAAa3B,EAAOC,KAAOD,
EAAOuR,QAMtF9W,EAAGG,MAAM4W,OAAS,SAASjU,EAAOoE,GAC9B,GAAI8P,GAAMlU,EAAM8L,UACZrJ,EAASyR,EAAIzR,SACbwR,EAASC,EAAID,SACbzS,EAAkB,MAAVyS,GAAkB,uBAA0BA,GACpD1P,EAASrH,EAAGG,MAAMyF,gBAAgB,KAAMsB,EAAW3B,GACnD6B,EAAQpH,EAAGG,MAAMwF,eAAe,KAAMuB,EAAW3B,GACjDuC,EAAIvC,EAAOC,KAAO4B,EAAM,EACxBmI,EAAIhK,EAAOE,IAAM4B,EAAO,CAG5BH,GAAUK,UAAS,KAAMc,QAEzB,IAAI4O,GAAa/P,EAAUK,UAAS,cAAejD,KAAKA,EAExD2S,GAAWxP,QAAQC,OAAM,QACpBC,KAAI,QAAU,kBACdA,KAAI,KAAO,SACX8C,MAAK,cAAgB,UAE1BwM,EACKtP,KAAI,IAAMG,GACVH,KAAI,IAAM4H,GACV2C,KAAK,SAAS8D,GAAI,MAAOA,MAMlChW,EAAGG,MAAM+W,UAAY,SAAUhF,EAAM9K,GACjC8K,EAAK7N,KAAK,WAUN,IATA,GAEI8S,GAFAjF,EAAO1R,GAAG2G,OAAOpG,MACjBqW,EAAQlF,EAAKA,OAAOmF,MAAK,OAAQC,UAEjCtP,KACAuP,EAAa,EACbC,EAAa,IACbjI,EAAI2C,EAAKvK,KAAI,KACb8P,EAAKC,WAAWxF,EAAKvK,KAAI,OACzBgQ,EAAQzF,EAAKA,KAAK,MAAMxK,OAAM,SAAUC,KAAI,IAAM,GAAGA,KAAI,IAAM4H,GAAG5H,KAAI,KAAO8P,EAAK,MAC/EN,EAAOC,EAAM/D,OAChBrL,EAAKvE,KAAK0T,GACVQ,EAAMzF,KAAKlK,EAAK4P,KAAI,MAChBD,EAAMvN,OAAOyN,wBAA0BzQ,IACvCY,EAAKqL,MACLsE,EAAMzF,KAAKlK,EAAK4P,KAAI,MACpB5P,GAAQmP,GACRQ,EAAQzF,EAAKxK,OAAM,SAAUC,KAAI,IAAM,GAAGA,KAAI,IAAM4H,GAAG5H,KAAI,OAAS4P,EAAaC,EAAaC,EAAK,MAAMvF,KAAKiF,OAS9HnX,EAAGG,MAAM2X,YAAc,SAAUC,EAAQC,GACrC,GAAID,IAAWC,EACX,OAAO,CAEX,KAAKD,IAAWC,EACZ,OAAO,CAGX,IAAID,EAAO3V,QAAU4V,EAAO5V,OACxB,OAAO,CAEX,KAAK,GAAIY,GAAI,EACTiV,EAAIF,EAAO3V,OAAY6V,EAAJjV,EAAOA,IAE1B,GAAI+U,EAAO/U,YAAc9B,QAAS8W,EAAOhV,YAAc9B,QAEnD,IAAKlB,EAAG8X,YAAYC,EAAO/U,GAAIgV,EAAOhV,IAClC,OAAO,MACR,IAAI+U,EAAO/U,IAAMgV,EAAOhV,GAE3B,OAAO,CAGf,QAAO,GCpsBXhD,EAAGI,OAAO8X,KAAO,WACb,YAqCA,SAASpV,GAAMsB,GAgTX,MA/SAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAII,GAAOJ,EAAUK,UAAS,qBAAsBjD,MAAMA,IACtDkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,wBAEnDwQ,GADS3Q,EAAUE,OAAM,KACrBJ,EAAKH,OAAM,KAEL,QAAViR,EACAF,EAAKE,MAAMA,IACW,OAAjBF,EAAKG,UAAsC,UAAjBH,EAAKG,WACpCH,EAAKE,MAAM1O,KAAKC,IAAIrB,EAAM7B,QAAQ,GAAK6B,EAAM7B,QAAQ,IAAM,KAG/D0R,EAAE3F,gBAAgBC,EAAa,QAAQrR,KAAK8W,GAE5CI,EAASA,GAAUJ,EAAK5P,OAExB,IAAIiQ,GAAML,EAAKM,YACJ,OAAPD,IACAA,EAAMD,EAAOE,aAGjB,IAAIC,GAAYN,EAAE5Q,UAAS,qBACtBjD,MAAMoU,GAAiB,MAC5BD,GAAUrQ,OAAOC,SAGAxE,SAAbsO,GACAgG,EAAE5Q,UAAS,KAAMJ,OAAM,QAASsD,MAAK,YAAc0H,EAGvD,IAAIwG,GACAC,EACAC,CACJ,QAAQX,EAAKG,UACT,IAAK,MACDI,EAAUhR,QAAQC,OAAM,QAASC,KAAI,QAAU,gBACjDkR,EAAI,EACyB,IAAzBvQ,EAAM7B,QAAQrE,OAChByW,EAAIC,EAA+B,EAAnBxQ,EAAM7B,QAAQ,GAAS6B,EAAM5B,YAAc,EACzB,IAAzB4B,EAAM7B,QAAQrE,OACvByW,EAAIC,EAAYxQ,EAAM7B,QAAQ,GAAK6B,EAAM7B,QAAQ,GAAK6B,EAAM5B,YAAc4B,EAAM7B,QAAQ,GAC9E6B,EAAM7B,QAAQrE,OAAS,IACjCyW,EAAIvQ,EAAM7B,QAAQ6B,EAAM7B,QAAQrE,OAAO,IAAIkG,EAAM7B,QAAQ,GAAG6B,EAAM7B,QAAQ,KAE1EgS,EACK9Q,KAAI,cAAgB,UACpBA,KAAI,IAAM,GACVA,KAAI,IAAMkR,EAAE,GACbE,IACAH,EAAatR,EAAKC,UAAS,mBACtBjD,KAAKgE,EAAM3B,UAChBiS,EAAWnR,QAAQC,OAAM,KAAMC,KAAI,QAAS,SAASQ,EAAEnF,GAC/C,OAAO,gBAAe,kBAA0B,GAALA,EAAS,eAAa,gBAAmB4U,KAAI,OAC7FlQ,OAAM,QACTkR,EAAWxQ,OAAOC,SAClBuQ,EACKjR,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,aAAehD,EAAGG,MAAM8H,UAAUK,EAAMH,IAAM,QAExDhB,OAAM,QACNQ,KAAI,KAAO,UACXA,KAAI,KAAOuQ,EAAKc,eAChBrR,KAAI,cAAgB,UACpBuK,KAAK,SAAS/J,EAAEnF,GACb,GAAIkG,GAAIqP,EAAIpQ,EACZ,QAAO,GAAMe,GAAG5D,MAAK,OAAU,GAAK4D,IAE5C0P,EAAWpG,gBAAgBC,EAAa,eACnC9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,aAAehD,EAAGG,MAAM8H,UAAUK,EAAM7B,QAAQzD,IAAM,QAGzE,MACJ,KAAK,SACD2V,EAAeM,EAAoB,EACnC,IAAIC,GAAe,GACfC,EAAa,EACbC,EAASjB,EAAE5Q,UAAS,KAAMJ,OAAM,QAChCkS,EAAmB,EACvB,IAAIC,EAAa,IAAK,CAElBF,EAAOzR,KAAI,YAAc,IAEzByR,EAAO/U,KAAK,SAAS8D,EAAEnF,GACnB,GAAIuW,GAAMxY,KAAKwM,wBACXnG,EAAQmS,EAAInS,KAChB+R,GAAaI,EAAIlS,OACdD,EAAQ8R,IAAcA,EAAe9R,KAE5CiS,EAAmB,UAAYC,EAAe,OAASH,EAAW,EAAIjB,
EAAKc,eAAiB,GAE5F,IAAIQ,GAAM9P,KAAKC,IAAID,KAAK8P,IAAIF,EAAa5P,KAAK+P,GAAG,KACjDd,IAAgBa,EAAMA,EAAIN,EAAeA,GAAc,GAEvDE,EACKzR,KAAI,YAAc0R,GAClB5O,MAAK,cAAgB6O,EAAa,IAAM,EAAI,QAAU,WAEvDI,GACAN,EACKzR,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,gBAAkBA,EAAI,GAAK,EAAI,IAAM,MAAQ,MAG5DoW,EAAOzR,KAAI,YAAc,iBAGjC8Q,GAAUhR,QAAQC,OAAM,QAASC,KAAI,QAAU,gBAC/CkR,EAAI,EACyB,IAAzBvQ,EAAM7B,QAAQrE,OACdyW,EAAIC,EAA+B,EAAnBxQ,EAAM7B,QAAQ,GAAS6B,EAAM5B,YAAc,EAC3B,IAAzB4B,EAAM7B,QAAQrE,OACrByW,EAAIC,EAAYxQ,EAAM7B,QAAQ,GAAK6B,EAAM7B,QAAQ,GAAK6B,EAAM5B,YAAc4B,EAAM7B,QAAQ,GAChF6B,EAAM7B,QAAQrE,OAAS,IAC/ByW,EAAIvQ,EAAM7B,QAAQ6B,EAAM7B,QAAQrE,OAAO,IAAIkG,EAAM7B,QAAQ,GAAG6B,EAAM7B,QAAQ,KAE9EgS,EACK9Q,KAAI,cAAgB,UACpBA,KAAI,IAAMgR,GACVhR,KAAI,IAAMkR,EAAE,GACbE,IAEAH,EAAatR,EAAKC,UAAS,mBAEtBjD,MAAMgE,EAAM3B,SAAS,GAAI2B,EAAM3B,SAAS2B,EAAM3B,SAASvE,OAAS,KACrEwW,EAAWnR,QAAQC,OAAM,KAAMC,KAAI,QAAS,SAASQ,EAAEnF,GAC/C,OAAO,gBAAe,kBAA0B,GAALA,EAAS,eAAa,gBAAmB4U,KAAI,OAC7FlQ,OAAM,QACTkR,EAAWxQ,OAAOC,SAClBuQ,EACKjR,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,aAAehD,EAAGG,MAAM8H,UAAWK,EAAMH,IAAM2Q,EAAYxQ,EAAM5B,YAAc,EAAI,IAAO,QAEpGS,OAAM,QACNQ,KAAI,KAAO,SACXA,KAAI,IAAMuQ,EAAKc,eACfrR,KAAI,YAAc0R,GAClB5O,MAAK,cAAgB6O,EAAgBA,EAAa,IAAM,EAAI,QAAU,MAAS,UAC/EpH,KAAK,SAAS/J,EAAEnF,GACb,GAAIkG,GAAIqP,EAAIpQ,EACZ,QAAO,GAAMe,GAAG5D,MAAK,OAAU,GAAK4D,IAE5C0P,EAAWpG,gBAAgBC,EAAa,kBACnC9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,aAAehD,EAAGG,MAAM8H,UAAWK,EAAMH,IAAM2Q,EAAYxQ,EAAM5B,YAAc,EAAI,IAAO,QAI7G,MACJ,KAAK,QACD+R,EAAUhR,QAAQC,OAAM,QAASC,KAAI,QAAU,gBAC/C8Q,EACKhO,MAAK,cAAgBkP,EAAe,SAAW,SAC/ChS,KAAI,YAAcgS,EAAe,aAAe,IAChDhS,KAAI,IAAMgS,GAAiBjQ,KAAKL,IAAI9D,EAAOuR,MAAO1P,GAAS,IAAM6R,GAAqB,GAAM,KAC5FtR,KAAI,IAAMgS,EAAgBnZ,GAAG6I,IAAIf,EAAM7B,SAAW,EAAKyR,EAAKc,eAC7DD,IACAH,EAAatR,EAAKC,UAAS,mBACtBjD,KAAKgE,EAAM3B,UAChBiS,EAAWnR,QAAQC,OAAM,KAAMC,KAAI,QAAS,SAASQ,EAAEnF,GAC/C,OAAO,gBAAe,kBAA0B,GAALA,EAAS,eAAa,gBAAmB4U,KAAI,OAC7FlQ,OAAM,QACJ+C,MAAK,UAAY,GACtBmO,EAAWxQ,OAAOC,SAClBuQ,EACKjR,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,eAAiBhD,EAAGG,MAAM8H,UAAUK,EAAMH,IAAM,MAE1DhB,OAAM,QACNQ,KAAI,KAAO,SACXA,KAAI,IAAM,GACVA,KAAI,IAAMuQ,EAAKc,eACfvO,MAAK,cAAgB,SACrByH,KAAK,SAAS/J,EAAGnF,GACd,GAAIkG,GAAIqP,EAAIpQ,EACZ,QAAO,GAAMe,GAAG5D,MAAK,OAAU,GAAK4D,IAE5C0P,EAAWpG,gBAAgBC,EAAa,iBACnC9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,eAAiBhD,EAAGG,MAAM8H,UAAUK,EAAM7B,QAAQzD,IAAM,MAElEmE,OAAM,QACNsD,MAAK,UAAY,GAE1B,MACJ,KAAK,OASDgO,EAAUhR,QAAQC,OAAM,QAASC,KAAI,QAAU,gBAC/C8Q,EACKhO,MAAK,cAAgBkP,EAAe,SAAW,OAC/ChS,KAAI,YAAcgS,EAAe,cAAgB,IACjDhS,KAAI,IAAMgS,GAAiBjQ,KAAKL,IAAI9D,EAAOC,KAAM4B,GAAS,IAAM6R,GAAqB,GAAM,KAC3FtR,KAAI,IAAMgS,GAAiBnZ,GAAG6I,IAAIf,EAAM7B,SAAW,GAAMyR,EAAKc,eAC/DD,IACAH,EAAatR,EAAKC,UAAS,mBACtBjD,KAAKgE,EAAM3B,UAChBiS,EAAWnR,QAAQC,OAAM,KAAMC,KAAI,QAAS,SAASQ,EAAEnF,GAC/C,OAAO,gBAAe,kBAA0B,GAALA,EAAS,eAAa,gBAAmB4U,KAAI,OAC7FlQ,OAAM,QACJ+C,MAAK,UAAY,GACtBmO,EAAWxQ,OAAOC,SAClBuQ,EACKjR,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,eAAiBhD,EAAGG,MAAM8H,UAAUqQ,EAAOnQ,IAAM,MAE3DhB,OAAM,QACNQ,KAAI,KAAO,SACXA,KAAI,IAAM,GACVA,KAAI,KAAOuQ,EAAKc,eAChBrR,KAAI,cAAgB,OACpBuK,KAAK,SAAS/J,EAAEnF,GACb,GAAIkG,GAAIqP,EAAIpQ,EACZ,QAAO,GAAMe,GAAG5D,MAAK,OAAU,GAAK4D,IAE5C0P,EAAWpG,gBAAgBC,EAAa,iBACnC9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,eAAiBhD,EAAGG,MAAM8H,UAAUK,EAAM7B,QAAQzD,IAAM,MAElEmE,OAAM,QACNsD,MAAK,UAAY,IA2BlC,GAvBAgO,EAAUvG,KAAK,SAAS/J,GAAK,MAAOA,MAEhC4Q,GAAiC,SAAlBb,EAAKG,UAAyC,UAAlBH,EAAKG,WAEhDF,EAAE5Q,UAAS,KACNlD,KAAK,SAAS8D,EAAEnF,GACbxC,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASQ,KAAI,UAAY,IAC3CW,EAAMH,GAAKG,EAAM7B,QAAQ,GAAK,IAAM6B,EAAMH,GAAKG,EAAM7B,QAAQ,GAAK,OAC9D0B,EAAI,OAAa,OAAJA,IACb3H,GAAG2G,OAAOpG,MAAM4G,KAAI,
UAAY,GAEpCnH,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASQ,KAAI,UAAY,MAKvDW,EAAM3B,SAAS,IAAM2B,EAAM3B,SAAS,IAA2B,GAArB2B,EAAM3B,SAAS,IACzDW,EAAKC,UAAS,mBAAoBkD,MAAK,UAAY,SAAUtC,EAAGnF,GAC5D,MAAQA,GAAQ,EAAJ,KAKpB+V,IAAiC,QAAlBb,EAAKG,UAAwC,WAAlBH,EAAKG,UAAwB,CACvE,GAAIuB,KACJtS,GAAKC,UAAS,mBACTlD,KAAK,SAAS8D,EAAEnF,GACb,IACQA,EACA4W,EAAYnW,KAAK6E,EAAMH,GAAKpH,KAAKwM,wBAAwBnG,MAAQ,GAEjEwS,EAAYnW,KAAK6E,EAAMH,GAAKpH,KAAKwM,wBAAwBnG,MAAQ,GACxE,MAAOyS,GACA7W,EACA4W,EAAYnW,KAAK6E,EAAMH,GAAK,GAE5ByR,EAAYnW,KAAK6E,EAAMH,GAAK,MAI5CgQ,EAAE5Q,UAAS,KAAMlD,KAAK,SAAS8D,EAAGnF,IAC1BsF,EAAMH,GAAKyR,EAAY,IAAMtR,EAAMH,GAAKyR,EAAY,MAChDzR,EAAI,OAAa,OAAJA,EACb3H,GAAG2G,OAAOpG,MAAMsH,SAEhB7H,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASkB,YAM/C8P,EAAE5Q,UAAS,SACNkF,OAAO,SAAUtE,GAMd,OAAQuP,WAAWhO,KAAK6E,MAAU,IAAJpG,GAAc,MAAmBtE,SAANsE,IAE5DuC,QAAO,QAAS,GAGrB4N,EAAShQ,EAAMwR,SAInBrH,EAAYS,UAAS,kBACdpQ,EA/UX,GAAIoV,GAAO1X,GAAG0V,IAAIgC,OACd5P,EAAQ9H,GAAG8H,MAAMC,SAEjBhD,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,GACRC,EAAS,GACTqR,EAAgB,KAChBK,GAAa,EACbO,EAAe,EACfK,GAAe,EACfD,GAAgB,EAChBZ,GAAY,EACZV,EAAQ,KACRa,EAAoB,EACpB9G,EAAWtO,OACX4E,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,YAE5BwX,GACK5P,MAAMA,GACN+P,OAAM,UACNG,WAAW,SAASrQ,GAAK,MAAOA,IAOrC,IAAImQ,GACA7F,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EAkWjD,OAxCA3F,GAAMoV,KAAOA,EACbpV,EAAMpC,SAAWA,EAEjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAC1CA,EAAMgM,SAAWC,OAAOC,WAEpBiK,mBAAoBhK,IAAK,WAAW,MAAOgK,IAAqB/J,IAAK,SAASvG,GAAGsQ,EAAkBtQ,IACnG+Q,eAAoBzK,IAAK,WAAW,MAAOyK,IAAiBxK,IAAK,SAASvG,GAAG+Q,EAAc/Q,IAC3F2Q,cAAoBrK,IAAK,WAAW,MAAOqK,IAAgBpK,IAAK,SAASvG,GAAG2Q,EAAa3Q,IACzFgR,cAAoB1K,IAAK,WAAW,MAAO0K,IAAgBzK,IAAK,SAASvG,GAAGgR,EAAahR,IACzFoQ,YAAoB9J,IAAK,WAAW,MAAO8J,IAAc7J,IAAK,SAASvG,GAAGoQ,EAAWpQ,IACrF8P,WAAoBxJ,IAAK,WAAW,MAAOyJ,IAAiBxJ,IAAK,SAASvG,GAAG+P,EAAc/P,IAC3FtB,QAAoB4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC7EyP,OAAoBnJ,IAAK,WAAW,MAAOmJ,IAASlJ,IAAK,SAASvG,GAAGyP,EAAMzP,IAC3EvB,OAAoB6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAC3EwJ,UAAoBlD,IAAK,WAAW,MAAOkD,IAAYjD,IAAK,SAASvG,GAAGwJ,EAASxJ,IAGjFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAmB5B,SAAV8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAqBjT,SAAZ8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAoB3B,SAAX8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAASE,EACT8J,EAAYW,MAAM3K,KAEtBH,OAAQ2G,IAAK,WAAW,MAAO3G,IAAS4G,IAAK,SAASvG,GAClDL,EAAQK,EACRuP,EAAK5P,MAAMA,GACXwQ,EAAwC,kBAArBxQ,GAAMjC,WACzBrG,EAAGG,MAAMkV,iBAAiBvS,EAAOwF,GAAO,SAAW,QAAS,YAAa,mBAIjFtI,EAAGG,MAAMqP,YAAY1M,GACrB9C,EAAGG,MAAMkV,iBAAiBvS,EAAOoV,GAAM,SAAW,aAAc,gBAAiB,WAAY,cAAe,eAC5GlY,EAAGG,MAAMkV,iBAAiBvS,EAAOwF,GAAO,SAAW,QAAS,YAAa,eAElExF,GCtYX9C,EAAGI,OAAO2Z,QAAU,WAChB,YAsCA,SAASjX,GAAMsB,GA+NX,MA9NAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,GAGjBd,EAAOO,OAAOqT,GAAW1V,EAAKmQ,IAAI,SAAStM,EAAEnF,GAAK,MAAOiX,GAAK9R,EAAEnF,MAC3DqD,WAAW6T,IAAW,EAAGvU,GAAiB,GAG/C,IAAIwU,KACJ,KAAKC,EAAS,CAEV,GAAiBC,GAAMC,EAAnBzR,IACJvE,GAAK2F,QAAQ,SAAU9B,EAAGnF,GACtB,GAAIuX,GAAKC,EAAMrS,GAAIsS,EAAKC,EAAMvS,GAAIwS,EAAKC,EAAMzS,GAAI0S,EAAKC,EAAM3S,GACxD4S,EAAUC,EAAW7S,EACrB4S,IACAA,EAAQ9Q,QAAQ,SAAUrI,EAAGoB,GACzB6F,EAAOpF,KAAKwX,EAAWrZ,EAAGoB,EAAGa,WAGjC8W,GAAM9R,EAAOpF,KAAKkX,GAClBJ,GAAM1R,EAAOpF,KAAK8W,GAClBE,GAAM5R,EAAOpF,KAAKgX,GAClBI,GAAMhS,EAAOpF,KAAKoX,KAE1BR,EAAO7Z,GAAGgJ,IAAIX,GACdyR,E
AAO9Z,GAAG6I,IAAIR,GACdsR,GAAUE,EAAMC,GAGpBY,EAAOvU,OAAOyT,GAAWD,GACzBe,EAAOzU,MAAM0U,IAAWvV,EAAiB,IAGzCwV,EAAUA,GAAWhV,EACrBiV,EAAUA,GAAWH,EAAOpB,OAAOrT,OAAOyU,EAAO,GAAGA,EAAO,IAG3D,IAAI5T,GAAOJ,EAAUK,UAAS,aAAcjD,MAAMA,GAClCgD,GAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,eACvDL,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAEvE,IAAI6V,GAAWhU,EAAKC,UAAS,eAAgBjD,KAAK,SAAS6D,GAAK,MAAOA,KACnEoT,EAAWD,EAAS7T,QAAQC,OAAM,KAAM+C,MAAK,iBAAmB,MAAMA,MAAK,eAAiB,KAChG6Q,GACK3T,KAAI,QAAU,cACdA,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAAK,MAAO,cAAgBpV,EAAO6T,EAAK9R,EAAEnF,IAA2B,IAArBoD,EAAOM,aAAsB,SAC5GgE,QAAO,QAAU,SAASvC,GAAK,MAAOA,GAAEsT,QAC7CH,EACK9I,gBAAgBC,EAAa,wBAC7BhI,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,KACtB2D,MAAM,SAASjG,EAAEnF,GAAK,MAAOA,GAAIyF,EAAWnE,EAAKlC,SACjDuF,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,cAAgBoD,EAAO6T,EAAK9R,EAAEnF,IAA2B,IAArBoD,EAAOM,aAAsB,SAEhF4U,EAASlT,OAAOC,SAKhBkT,EAASlX,KAAK,SAAS8D,EAAEnF,GACrB,GAAIuW,GAAM/Y,GAAG2G,OAAOpG,OACnB6Z,EAAOE,GAAO7Q,QAAQ,SAAUyR,GAC7B,GAAa7X,SAAT6X,EAAEvT,IAA6B,OAATuT,EAAEvT,GAAa,CACrC,GAAIqE,GAAOkP,IAAMd,EAAS,MAAQ,MAClCrB,GAAI7R,OAAM,QACP+C,MAAK,SAAWgG,EAAStI,IAAMmE,EAAMnE,EAAEnF,IACvC2E,KAAI,QAAU,iCAAmC6E,GACpD+M,EAAI7R,OAAM,QACP+C,MAAK,SAAWgG,EAAStI,IAAMmE,EAAMnE,EAAEnF,IACvC2E,KAAI,QAAU,8BAAgC6E,OAK7D,IAAImP,GAAY,WAAa,MAAwB,QAAhBC,EAA4C,GAArBxV,EAAOM,YAAoBgD,KAAKF,IAAI,GAAyB,GAArBpD,EAAOM,cACvGmV,EAAY,WAAa,MAA4B,IAArBzV,EAAOM,YAAqBiV,IAAY,GACxEG,EAAY,WAAa,MAA4B,IAArB1V,EAAOM,YAAqBiV,IAAY,IAG3Ef,EAAOE,GAAO7Q,QAAQ,SAAUyR,GAC7B,GAAIlP,GAAOkP,IAAMd,EAAS,MAAQ,OAC9BmB,EAAYL,IAAMd,EAASJ,EAAQE,CACvCY,GAASnU,OAAM,sCAAyCqF,GACrDgG,gBAAgBC,EAAa,wBAC3B9K,KAAI,KAA4B,IAArBvB,EAAOM,aAClBiB,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOQ,EAAEvT,MAC3CR,KAAI,KAA4B,IAArBvB,EAAOM,aAClBiB,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOa,EAAS5T,MACvDmT,EAASnU,OAAM,mCAAsCqF,GAClDgG,gBAAgBC,EAAa,wBAC3B9K,KAAI,KAAOkU,GACXlU,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOQ,EAAEvT,MAC3CR,KAAI,KAAOmU,GACXnU,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOQ,EAAEvT,SAGnDyS,EAAOE,GAAO7Q,QAAQ,SAAUyR,GAC7B,GAAIlP,GAAOkP,IAAMd,EAAS,MAAQ,MAClCW,GAAShU,UAAS,eAAkBiF,GACjC7K,GAAE,YAAc,SAASwG,EAAEnF,EAAEwY,GAC1Bhb,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL5P,QAAUI,IAAKkP,EAAEvT,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEqT,IACnD5Z,EAAGpB,GAAGuE,UAGbpD,GAAE,WAAa,SAASwG,EAAEnF,EAAEwY,GACzBhb,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACLoG,QAAUI,IAAKkP,EAAEvT,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEqT,IACnD5Z,EAAGpB,GAAGuE,UAGbpD,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBAAkBjF,EAAGpB,GAAGuE,YAK3CwW,EAAS7T,OAAM,QACVC,KAAI,QAAU,kBAEdhG,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACLxP,IAAKyN,EAAK9R,GACV6D,MAAOiO,EAAK9R,GACZiE,SACMI,IAAK,KAAMR,MAAO0O,EAAMvS,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAC1DwJ,IAAK,KAAMR,MAAOiQ,EAAM9T,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAC1DwJ,IAAK,KAAMR,MAAOwO,EAAMrS,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAEhEsB,KAAM6D,EACNiB,MAAOpG,EACPpB,EAAGpB,GAAGuE,UAGbpD,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACLwG,IAAKyN,EAAK9R,GACV6D,MAAOiO,EAAK9R,GACZiE,SACMI,IAAK,KAAMR,MAAO0O,EAAMvS,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAC1DwJ,IAAK,KAAMR,MAAOiQ,EAAM9T,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAC1DwJ,IAAK,KAAMR,MAAOwO,EAAMrS,GAAImE,MAAOmE,EAAStI,IAAMmE,EAAMnE,EAAEnF,KAEhEsB,KAAM6D,EACNiB,MAAOpG,EACPpB,EAAGpB,GAAGuE,UAGbpD,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBAAkBjF,EAAGpB,GAAGuE,UAIzCuW,EAASnU,OAAM,uBACZqL,gBAAgBC,EAAa,qBAC3B9K,KAAI,IAAM,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOR,EAAMvS,MAC9CR,KAAI,QAAUgU,GACdhU,KAAI,IAAMkU,GA
CVlU,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAO0G,MAAKC,IAAIuR,EAAOR,EAAMvS,IAAM+S,EAAOV,EAAMrS,MAAQ,IACvFsC,MAAK,OAAS,SAAStC,EAAEnF,GAAK,MAAOyN,GAAStI,IAAMmE,EAAMnE,EAAEnF,KAC5DyH,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOyN,GAAStI,IAAMmE,EAAMnE,EAAEnF,KAGnEuY,EAAS7T,OAAM,QAASC,KAAI,QAAU,qBAEtC2T,EAASnU,OAAM,0BACZqL,gBAAgBC,EAAa,6BAC3B9K,KAAI,KAAOkU,GACXlU,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOe,EAAM9T,MAC/CR,KAAI,KAAOmU,GACXnU,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOkY,GAAOe,EAAM9T,KAGpD,IAAI+T,GAAWZ,EAAS/T,UAAS,uBAAwBjD,KAAK,SAAS6D,GACnE,MAAO6S,GAAW7S,QAEtB+T,GAASzU,QAAQC,OAAM,UAClB+C,MAAK,OAAS,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOW,GAAWhU,EAAEnF,EAAEwY,IAAMlP,EAAMnE,EAAEqT,KACpE/Q,MAAK,SAAW,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOW,GAAWhU,EAAEnF,EAAEwY,IAAMlP,EAAMnE,EAAEqT,KACtE/Q,MAAK,UAAY,KACjB9I,GAAE,YAAc,SAASwG,EAAEnF,EAAEwY,GAC1Bhb,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL5P,QAAUI,IAAK4P,EAAWjU,EAAEnF,EAAEwY,GAAIlP,MAAO6P,EAAWhU,EAAEnF,EAAEwY,IAAMlP,EAAMnE,EAAEqT,IACtE5Z,EAAGpB,GAAGuE,UAGbpD,GAAE,WAAa,SAASwG,EAAEnF,EAAEwY,GACzBhb,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACLoG,QAAUI,IAAK4P,EAAWjU,EAAEnF,EAAEwY,GAAIlP,MAAO6P,EAAWhU,EAAEnF,EAAEwY,IAAMlP,EAAMnE,EAAEqT,IACtE5Z,EAAGpB,GAAGuE,UAGbpD,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBAAkBjF,EAAGpB,GAAGuE,UAEzCmX,EAASvU,KAAI,QAAU,sBACvBuU,EACG1J,gBAAgBC,EAAa,kCAC3B9K,KAAI,KAA4B,IAArBvB,EAAOM,aAClBiB,KAAI,KAAO,SAASQ,EAAEnF,EAAEwY,GAAK,MAAON,GAAOD,EAAW9S,EAAEnF,EAAEwY,MAC1D7T,KAAI,IAAM,KACfuU,EAAS9T,OAAOC,SAGhB+S,EAAUhV,EAAO0T,OACjBuB,EAAUH,EAAOpB,SAGrBrH,EAAYS,UAAS,wBACdpQ,EA/PX,GAmBIkX,GAASE,EACTE,EAASe,EASTC,EAASC,EA7BT9V,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC7C4B,EAAQ,IACRC,EAAS,IACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrB/E,EAAS5F,GAAG8H,MAAMsI,UAClBsK,EAAS1a,GAAG8H,MAAMC,SAClB0R,EAAQ,SAAS9R,GAAK,MAAOA,GAAEkU,OAC/B7B,EAAQ,SAASrS,GAAK,MAAOA,GAAEU,OAAOyT,IACtCL,EAAQ,SAAS9T,GAAK,MAAOA,GAAEU,OAAO0T,IACtC7B,EAAQ,SAASvS,GAAK,MAAOA,GAAEU,OAAO2T,IACtC5B,EAAQ,SAASzS,GAAK,MAAOA,GAAEU,OAAO4T,aACtC3B,EAAQ,SAAS3S,GAAK,MAAOA,GAAEU,OAAO6T,cACtCjM,EAAW,SAAStI,GAAK,MAAOA,GAAEmE,OAClC0O,EAAc,SAAS7S,GAAK,MAAOA,GAAEU,OAAOqT,UAC5CjB,EAAa,SAAS9S,EAAGnF,EAAGwY,GAAK,MAAOrT,IACxCiU,EAAa,SAASjU,EAAGnF,EAAGwY,GAAK,MAAOrT,IACxCgU,EAAa,SAAShU,EAAGnF,EAAGwY,GAAK,MAAO3X,SACxCyI,EAAQtM,EAAGG,MAAMuQ,eACjBxJ,EAAY,KAGZxG,EAAWF,GAAGE,SAAQ,mBAAqB,kBAAmB,mBAAoB,aAClF+H,EAAW,IACXmT,EAAc,KAOdnJ,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA8RjD,OAtDA3F,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAc6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACrEtB,QAAc4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACvEiT,aAAc3M,IAAK,WAAW,MAAO2M,IAAe1M,IAAK,SAASvG,GAAGiT,EAAYjT,IACjFb,GAAcmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IACnE4R,IAAKtL,IAAK,WAAW,MAAOuL,IAAStL,IAAK,SAASvG,GAAG6R,EAAM7R,IAC5DgU,IAAK1N,IAAK,WAAW,MAAOgN,IAAS/M,IAAK,SAASvG,GAAGsT,EAAMtT,IAC5D8R,IAAKxL,IAAK,WAAW,MAAOyL,IAASxL,IAAK,SAASvG,GAAG+R,EAAM/R,IAC5DgS,IAAK1L,IAAK,WAAW,MAAO2L,IAAS1L,IAAK,SAASvG,GAAGiS,EAAMjS,IAC5DkS,IAAK5L,IAAK,WAAW,MAAO6L,IAAS5L,IAAK,SAASvG,GAAGmS,EAAMnS,IAC5DiU,WAAe3N,IAAK,WAAW,MAAOwB,IAAYvB,IAAK,SAASvG,GAAG8H,EAAS9H,IAC5EuT,UAAejN,IAAK,WAAW,MAAO+L,IAAc9L,IAAK,SAASvG;AN3R1E,AM2R6EqS,EN3R3E,AM2RsFrS,CN3RrF,GAAG,AM4REkU,CN5RD,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EM4RK5N,IN5RC,AM4RI,WAAW,MAAOgM,IAAc/L,IAAK,SAASvG,GAAGsS,EAAWtS,IAChFmU,cAAe7N,IAAK,WAAW,MAAOmN,IAAclN,IAAK,SAASvG,GAAGyT,EAAWzT,IAChFoU,cAAe9N,IAAK,WAAW,MAAOkN,IAAcjN,IAAK,SAASvG,GAAGwT,EAAWxT,IAChFvC,QAAU6I,IAAK,WAAW,MAAO7I,IAAU8I,IAAK,SAASvG,GAAGvC,EAAOuC,IACnEuS,QAAUjM,IAAK,WAAW,M
AAOiM,IAAUhM,IAAK,SAASvG,GAAGuS,EAAOvS,IACnEqR,SAAU/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACrEyR,SAAUnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACrEuR,QAAUjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACnEwS,QAAUlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACnE4B,IAAc0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAE/D4G,GACIN,IAAK,WAED,MADA9M,SAAQK,KAAI,mFAGhB0M,IAAK,SAASvG,GACVxG,QAAQK,KAAI,iFAIpB+C,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9BF,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,OAI1BzI,EAAGG,MAAMqP,YAAY1M,GAEdA,GCnUX9C,EAAGI,OAAO4c,aAAe,WACrB,YA0CA,SAASla,GAAMsB,GAuHX,MAtHAqO,GAAYW,QACZX,EAAYrS,OAAO6c,GACfC,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,IAAkByB,GAAUgL,SAASlL,EAAUuD,MAAK,WAAc,KAAOlF,EAAOC,KAAOD,EAAOuR,MAC9FlR,GAAmByB,GAAU+K,SAASlL,EAAUuD,MAAK,YAAe,KAAOlF,EAAOE,IAAMF,EAAOsR,MAUnG,IARA/T,EAAMqR,OAAS,WACXzT,EAAS4c,eACTpW,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAEnDA,EAAMoE,UAAYnG,MAIbuD,IAASA,EAAKlC,OAAQ,CACvB,GAAI6U,GAAa/P,EAAUK,UAAS,cAAejD,MAAMyS,GAYzD,OAVAE,GAAWxP,QAAQC,OAAM,QACpBC,KAAI,QAAU,kBACdA,KAAI,KAAO,SACX8C,MAAK,cAAgB,UAE1BwM,EACKtP,KAAI,IAAMpC,EAAOC,KAAOG,EAAiB,GACzCgC,KAAI,IAAMpC,EAAOE,IAAMG,EAAkB,GACzCsM,KAAK,SAAS/J,GAAK,MAAOA,KAExBrF,EAEPoE,EAAUK,UAAS,cAAec,SAItCP,EAAImV,EAAQ7W,SACZmJ,EAAI0N,EAAQ/B,SAASqC,OAAM,EAG3B,IAAIjW,GAAOJ,EAAUK,UAAS,gCAAiCjD,MAAMA,IACjEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,mCAAmCD,OAAM,KACzF+V,EAAYD,EAAO9V,OAAM,QACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAC5BD,OAAM,KAAMC,KAAI,QAAU,eAC1BD,OAAM,QAEX8V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjCwQ,EAAExQ,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEhEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAI3DsX,EAAQ7V,MAAMzB,GAAgB0B,OAAOzB,EAErC,IAAI+X,GAAWxF,EAAEhR,OAAM,gBAClByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAc/C,IAZAF,EAASxP,aAAa/M,KAAK6b,GAE3BQ,EAAU/V,OAAM,YACXC,KAAI,KAAO,mBAAqBsV,EAAQ1S,MACxC7C,OAAM,QAEXyQ,EAAEhR,OAAM,oBAAuB8V,EAAQ1S,KAAO,SACzC5C,KAAI,QAAUG,EAAEpB,aAAegT,EAAgB,EAAI,IACnD/R,KAAI,SAAW,IACfA,KAAI,KAAOG,EAAEpB,aAAegT,EAAgB,EAAI,IAGjDwD,EAAW,CACXC,EACK7U,MAAMR,GACNsQ,MAAOpY,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAC/CwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBAAkBQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KAC5E0R,EAAEhR,OAAM,iBAAkB/F,KAAK+b,EAE/B,IAAI/D,GAASjB,EAAEhR,OAAM,iBAAkBI,UAAS,IAC5CmS,IACAN,EACK7R,UAAS,QACTI,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAAK,MAAO,gBAAkBA,EAAI,IAAM,EAAI,IAAM,MAAQ,MAIlG4B,IACAC,EACK/U,MAAMiH,GACN6I,MAAO1O,KAAKwB,MAAMtF,EAAgB,KAClCkY,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBAAkB/F,KAAKic,IAInClF,EAAEhR,OAAM,qBACHQ,KAAI,KAAM,GACVA,KAAI,KAAMhC,GACVgC,KAAI,KAAO4H,EAAE,IACb5H,KAAI,KAAO4H,EAAE,MAQtBkD,EAAYS,UAAS,8BACdpQ,EA3JX,GAaIgF,GAAGyH,EAbH0N,EAAUjd,EAAGI,OAAO2Z,UACpBoD,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAElB3S,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAChD4B,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMsQ,WACjByM,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClBhE,GAAgB,EAChBxZ,EAAUF,EAAGI,OAAOF,UAEpB6W,EAAS,qBACTrW,EAAWF,GAAGE,SAAQ,eAAiB,aACvC+H,EAAW,GAEf0U,GACK9E,OAAM,UACNU,YAAW,GACXP,
WAAW,SAASrQ,GAAK,MAAOA,KAErCkV,EACKhF,OAAM,EAAqB,QAAU,QACrCG,WAAWhY,GAAGmM,OAAM,SAGzBzM,EAAQuI,SAAS,EAMjB,IAAIgK,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA6LjD,OA7DAwU,GAAQvc,SAASiB,GAAE,2BAA6B,SAASoc,GACrD7d,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7B+W,EAAQvc,SAASiB,GAAE,0BAA4B,SAASoc,GACpD7d,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7B+W,EAAQvc,SAASiB,GAAE,2BAA6B,SAASoc,GACrD7d,MAOJ4C,EAAMpC,SAAWA,EACjBoC,EAAMma,QAAUA,EAChBna,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtE+Q,eAAgBzK,IAAK,WAAW,MAAOyK,IAAiBxK,IAAK,SAASvG,GAAG+Q,EAAc/Q,IACvFuU,WAAYjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAC3EyU,WAAYnO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC3EqV,gBAAoB/O,IAAK,WAAW,MAAO/O,IAAWgP,IAAK,SAASvG,GAAGzI,EAAQyI,IAC/EoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAGrEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBwU,EAAQxU,SAASA,GACjB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1BsU,EAAQ3Q,MAAMA,KAElBoR,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQ,EAAM,QAAU,YAItCrY,EAAGG,MAAMkW,eAAevT,EAAOma,GAC/Bjd,EAAGG,MAAMqP,YAAY1M,GAEdA,GCjOX9C,EAAGI,OAAO6d,OAAS,WACf,YA6BA,SAASC,GAAWC,EAAQtV,GACxB,GAAIuV,GAAKD,EAAOhd,OAChBgd,GAAOvI,KAAK,SAAS9F,EAAGuO,GACpB,GAAIC,GAAKF,EAAGjL,QAAQrD,GAChByO,EAAKH,EAAGjL,QAAQkL,EACpB,OAAO7d,IAAGge,WAAW3V,EAAOyV,GAAKzV,EAAO0V,MAIhD,QAASzb,GAAMsB,GAkOX,MAjOAA,GAAUC,KAAK,SAAS8D,EAAGnF,GACvB,GAAI2C,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,EAEjB,IAAIuX,GAASC,EAAOtd,KAAKL,KAAMoH,EAAGnF,GAAG7B,QACjCwd,EAAUC,EAAQxd,KAAKL,KAAMoH,EAAGnF,GAAG7B,QACnC0d,EAAcC,EAAY1d,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAC3C4d,EAAWC,EAAS5d,KAAKL,KAAMoH,EAAGnF,GAAG7B,QACrC8d,EAAcC,EAAY9d,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAC3Cge,EAAeC,EAAahe,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAC7Cke,EAAmBC,EAAiBle,KAAKL,KAAMoH,EAAGnF,GAAG7B,QACrDoe,EAAgBC,EAAcpe,KAAKL,KAAMoH,EAAGnF,GAAG7B,OAGnD+c,GAAWe,EAAaR,GACxBP,EAAWiB,EAAcR,GACzBT,EAAWmB,EAAkBR,GAC7BX,EAAWqB,EAAeR,GAG1BN,EAAO7I,KAAKpV,GAAGge,YACfG,EAAQ/I,KAAKpV,GAAGge,YAChBK,EAAYjJ,KAAKpV,GAAGge,YACpBO,EAASnJ,KAAKpV,GAAGge,WAIjB,IAAIiB,GAAKjf,GAAG8H,MAAMC,SACb5B,OAAQnG,GAAGkf,OAAOlf,GAAGmf,OAAOC,EAAQnB,MACpChY,MAAM6Q,GAAW3R,EAAgB,IAAM,EAAGA,GAGtC5E,MAAK8e,WAAarf,GAAG8H,MAAMC,SAC/B5B,QAAQ,EAAGoD,EAAAA,IACXtD,MAAMgZ,EAAGhZ,QAGd1F,MAAK8e,UAAYJ,CAYjB,KAAI,GALAnY,IALW9G,GAAGgJ,IAAIiV,GACPje,GAAG6I,IAAIoV,GACPA,EAAO,GAGXvX,EAAUK,UAAS,uBAAwBjD,MAAM6D,KACxDX,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,0BACnD6V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,KAEXnE,EAAE,EAAE8c,EAAGrB,EAAOrc,OAAU0d,EAAF9c,EAAMA,IAAC,CACjC,GAAI+c,GAAkB,oBAAoB/c,CAClC,IAALA,IACC+c,EAAkBA,EAAkB,YAAYC,EAAsBhd,IAE1Ewa,EAAO9V,OAAM,QAASC,KAAI,QAAUoY,GAGxCvC,EAAO9V,OAAM,QAASC,KAAI,QAAU,cAEpCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAOvE,KAAI,GAJAwa,GAAK,SAAS9X,GAAK,MAAOuB,MAAKC,IAAI8V,EAAGtX,GAAKsX,EAAG,KAE9CS,EAAM,SAAS/X,GAAK,MAAesX,GAAJ,EAAJtX,EAAWA,EAAQ,IAE1CnF
,EAAE,EAAE8c,EAAGrB,EAAOrc,OAAU0d,EAAF9c,EAAMA,IAAC,CACjC,GAAIyD,GAAQgY,EAAOzb,EACnBmV,GAAEhR,OAAM,gBAAiBnE,GACpB4a,MAAMnX,GACNkB,KAAI,SAAW/B,GACfuI,aACA1F,SAASA,GACTd,KAAI,QAAUsY,EAAGxZ,IACjBkB,KAAI,IAAMuY,EAAIzZ,IAGvB0R,EAAEhR,OAAM,mBACHsD,MAAK,OAAS6B,GACd3E,KAAI,SAAW/B,EAAkB,GACjC+B,KAAI,IAAM/B,EAAkB,GAC5BjE,GAAE,YAAc,WACbjB,EAASsb,kBACLhQ,MAAO+S,EAAS,GAChB1C,MAAOkD,EAAc,IAAM,UAC3BjT,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,YAAc,WACbjB,EAASmG,kBACLmF,MAAO+S,EAAS,GAChB1C,MAAOkD,EAAc,IAAM,UAC3BjT,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,WACZjB,EAASsF,iBACLgG,MAAO+S,EAAS,GAChB1C,MAAOkD,EAAc,IAAM,UAC3BjT,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC0D,aACA1F,SAASA,GACTd,KAAI,QAAqB,EAAXoX,EACXU,EAAG,GAAKA,EAAGV,EAAS,IAClBU,EAAGV,EAAS,IAAMU,EAAG,IAC1B9X,KAAI,IAAMuY,EAAInB,GAEnB,IAAIoB,GAAMva,EAAkB,EAExBwa,EAAazB,EAAQlK,IAAK,SAAS4L,EAAQjX,GAC3C,OAAQ4C,MAAOqU,EAAQhE,MAAO8C,EAAa/V,KAE/CoU,GACGjW,UAAS,0BACTjD,KAAK8b,GACL3Y,QACAC,OAAM,QACNC,KAAI,QAAU,qBACdA,KAAI,IAAM,MAAQwY,EAAK,IAAMA,EAAK,KAAQA,EAAM,KAAQA,EAAM,KAAQA,EAAM,KAC5Exe,GAAE,YAAc,SAASwG,GACxBzH,EAASsb,kBACPhQ,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5BwC,KAAMwS,EAAGtX,EAAE6D,OAAQpG,EAAgB,OAItCjE,GAAE,YAAc,SAASwG,GACtBzH,EAASmG,kBACLmF,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAGnF,GACxBtC,EAASsF,iBACLgG,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAItC0N,EAAE5Q,UAAS,0BACRjD,KAAK8b,GACLjS,aACA1F,SAASA,GACTd,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAesX,EAAGtX,EAAE6D,OAAS,IAAOpG,EAAkB,EAAK,KAErG,IAAI0a,GAAkBzB,EAAYpK,IAAK,SAAS4L,EAAQjX,GACpD,OAAQ4C,MAAOqU,EAAQhE,MAAOgD,EAAiBjW,KAEnDoU,GACGjW,UAAS,sBACTjD,KAAKgc,GACL7Y,QACAC,OAAM,QACNC,KAAI,SAAW,IACfA,KAAI,QAAU,iBACdA,KAAI,KAAO,SAASQ,GAAK,MAAOsX,GAAGtX,EAAE6D,SACrCrE,KAAI,KAAO,KACXA,KAAI,KAAO,SAASQ,GAAK,MAAOsX,GAAGtX,EAAE6D,SACrCrE,KAAI,KAAO/B,EAAkB,GAC7BjE,GAAE,YAAc,SAASwG,GACxBzH,EAASsb,kBACPhQ,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5BwC,KAAMwS,EAAGtX,EAAE6D,OAAQpG,EAAgB,OAItCjE,GAAE,YAAc,SAASwG,GACtBzH,EAASmG,kBACLmF,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAGnF,GACxBtC,EAASsF,iBACLgG,MAAO7D,EAAE6D,MACTqQ,MAAOlU,EAAEkU,OAAS,WAClB/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAItC0N,EAAE5Q,UAAS,sBACRjD,KAAKgc,GACLnS,aACA1F,SAASA,GACTd,KAAI,KAAO,SAASQ,GAAK,MAAOsX,GAAGtX,EAAE6D,SACrCrE,KAAI,KAAO,SAASQ,GAAK,MAAOsX,GAAGtX,EAAE6D,SAExC1E,EAAKC,UAAS,aACT5F,GAAE,YAAc,SAASwG,EAAEnF,GACxB,GAAIqZ,GAAQ4C,EAAYjc,IAAMud,EAAmBvd,EACjDtC,GAASsb,kBACLhQ,MAAO7D,EACPkU,MAAOA,EACP/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,YAAc,WACbjB,EAASmG,kBACLmF,MAAO+S,EAAS,GAChB1C,MAAOkD,EAAc,IAAM,WAC3BjT,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAEnF,GACvB,GAAIqZ,GAAQ4C,EAAYjc,IAAMud,EAAmBvd,EACjDtC,GAASsF,iBACLgG,MAAO7D,EACPkU,MAAOA,EACP/P,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,cAKrC3H,EAlQX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C6S,EAAS,OACTf,GAAU,EACVoH,EAAS,SAASvW,GAAK,MAAOA,GAAEuW,QAChCE,EAAU,SAASzW,GAAK,MAAOA,GAAEyW,QAAUzW,EAAEyW,YAC7CE,EAAc,SAAS3W,GAAK,MAAOA,GAAE2W,YAAc3W,EAAE2W,aAAe,IACpEE,EAAW,SAAS7W,GAAK,MAAOA,GAAE6W,UAClCE,EAAc,SAAS/W,GAAK,MAAOA,GAAE+W,YAAc/W,EAAE+W,gBACrDE,EAAe,SAASjX,GAAK,MAAOA,GAAEiX,aAAejX,EAAEiX,iBACvDE,EAAmB,SAASnX,GAAK,MAAOA,GAAEmX,iBAAmBnX,EAAEmX,qBAC/DE,EAAgB,SAASrX,GAAK,MAAOA,GAAEqX,cAAgBrX,EAAEqX,kBACzDI,GAAU,GACVxY,EAAQ,IACRC,EAAS,GACTH,EAAY,KACZsR,EAAa,KACblM,EAAQtM,EAAGG,MAAMsQ,UAAQ,YACzB/P,EAAWF,GAAGE,S
AAQ,mBAAqB,kBAAmB,oBAC9D6f,GAAqB,UAAY,OAAQ,WACzCP,GAAwB,MAAQ,MAAO,OACvCvX,EAAW,GAoRjB,OA/BA3F,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB0P,QAAczP,IAAK,WAAW,MAAOyP,IAAUxP,IAAK,SAASvG,GAAG+V,EAAO/V,IACvEiW,SAAc3P,IAAK,WAAW,MAAO2P,IAAW1P,IAAK,SAASvG,GAAGiW,EAAQjW,IACzEqW,UAAW/P,IAAK,WAAW,MAAO+P,IAAY9P,IAAK,SAASvG,GAAGqW,EAASrW,IACxEiX,QAAc3Q,IAAK,WAAW,MAAO2Q,IAAU1Q,IAAK,SAASvG,GAAGiX,EAAOjX,IACvEvB,OAAW6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAClEtB,QAAY4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACrE6P,YAAgBvJ,IAAK,WAAW,MAAOuJ,IAActJ,IAAK,SAASvG,GAAG6P,EAAW7P,IACjFF,UAAcwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GAAGF,EAASE,IAG3EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D6S,QAASpJ,IAAK,WAAW,MAAOoJ,IAAUnJ,IAAK,SAASvG,GACpD0P,EAAS1P,EACT2O,EAAoB,SAAVe,GAA+B,UAAVA,IAEnC/L,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GACdA,GChTX9C,EAAGI,OAAOogB,YAAc,WACpB,YA2BA,SAAS1d,GAAMsB,GAuHX,MAtHAA,GAAUC,KAAK,SAAS8D,EAAGnF,GACvB,GAAIkE,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAOnD,IAJA/T,EAAMqR,OAAS,WAAarR,EAAMsB,IAClCtB,EAAMoE,UAAYnG,MAGboH,IAAMuW,EAAOtd,KAAKL,KAAMoH,EAAGnF,GAE5B,MADAhD,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAGtC,IAAIoW,GAASC,EAAOtd,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAAQyU,KAAKpV,GAAGge,YACjDG,EAAUC,EAAQxd,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAAQyU,KAAKpV,GAAGge,YACnDO,EAAWC,EAAS5d,KAAKL,KAAMoH,EAAGnF,GAAG7B,QAAQyU,KAAKpV,GAAGge,YAGrDlX,EAAOJ,EAAUK,UAAS,4BAA6BjD,MAAM6D,IAC7DX,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,+BACnD6V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,aAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAGvE,IAAIga,GAAKjf,GAAG8H,MAAMC,SACb5B,QAAQ,EAAG+C,KAAKL,IAAIoV,EAAO,GAAKE,EAAQ,IAAM,EAAII,EAAS,MAC3DtY,MAAM6Q,GAAW3R,EAAgB,IAAM,EAAGA,IAG3C8a,EAAK1f,KAAK8e,WAAarf,GAAG8H,MAAMC,SAC/B5B,QAAQ,EAAGoD,EAAAA,IACXtD,MAAMgZ,EAAGhZ,QAGd1F,MAAK8e,UAAYJ,CAEjB,IAGIiB,GAAQlD,EAAOrW,OAAM,cAAeO,OAAM,KACzCC,KAAI,cAAgB,OACpBA,KAAI,YAAc,iBAAmBN,EAAS9B,EAAOE,IAAMF,EAAOsR,QAAU,EAAI,IACrF6J,GAAMhZ,OAAM,QACPC,KAAI,QAAU,YACduK,KAAK,SAAS/J,GAAK,MAAOA,GAAEuY,QAEjCA,EAAMhZ,OAAM,QACPC,KAAI,QAAU,eACdA,KAAI,KAAO,OACXuK,KAAK,SAAS/J,GAAK,MAAOA,GAAEwY,WAEjC1C,EACK7W,MAAMzB,GACN0B,OAAOzB,EAEZ,IAAIgb,GAAazI,EAAEhR,OAAM,iBACzB3G,IAAG2N,WAAWyS,GAAYxf,KAAK6c,EAG/B,IAAItR,GAAS6L,GAAciH,EAAGjH,WAAY7S,EAAiB,KAGvDkb,EAAO1I,EAAE5Q,UAAS,aACjBjD,KAAKmb,EAAGrH,MAAOA,EAAQA,EAASzS,EAAiB,IAAO,SAASwC,GAC9D,MAAOpH,MAAK8Q,aAAelF,EAAOxE,KAItC2Y,EAAYD,EAAKpZ,QAAQC,OAAM,KAC9BC,KAAI,QAAU,WACdA,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAesY,EAAGtY,GAAK,QAC9DsC,MAAK,UAAY,KAEtBqW,GAAUpZ,OAAM,QACXC,KAAI,KAAO/B,GACX+B,KAAI,KAAyB,EAAlB/B,EAAsB,GAEtCkb,EAAUpZ,OAAM,QACXC,KAAI,cAAgB,UACpBA,KAAI,KAAO,OACXA,KAAI,IAAwB,EAAlB/B,EAAsB,GAChCsM,KAAKvF,EAGV,IAAIoU,GAAavgB,GAAG2N,WAAW0S,GAC1B1S,aACA1F,SAASwV,EAAOxV,YAChBd,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAesX,EAAGtX,GAAK,QAC9DsC,MAAK,UAAY,EAEtBsW,GAAW5Z,OAAM,QACZQ,KAAI,KAAO/B,GACX+B,KAAI,KAAyB,EAAlB/B,EAAsB,GAEtCmb,EAAW5Z,OAAM,QACZQ,KAAI,IAAwB,EAAlB/B,EAAsB,GAGrCpF,GAAG2N,WAAW0S,EAAKzY,QACd+F,aACA1F,SAASwV,EAAOxV,YAChBd,KAAI,YAAc,SAASQ,GAAK,MA
AO,aAAesX,EAAGtX,GAAK,QAC9DsC,MAAK,UAAY,MACjBpC,WAGT7H,GAAGwgB,MAAMC,QACFne,EA5IX,GAAImb,GAASje,EAAGI,OAAO6d,SACnB/d,EAAUF,EAAGI,OAAOF,UAEpBmY,EAAS,OACPf,GAAU,EACV/R,GAAUE,IAAK,EAAGqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,KAC/CkZ,EAAS,SAASvW,GAAK,MAAOA,GAAEuW,QAChCE,EAAU,SAASzW,GAAK,MAAOA,GAAEyW,QAAUzW,EAAEyW,YAC7CI,EAAW,SAAS7W,GAAK,MAAOA,GAAE6W,UAClC5X,EAAQ,KACRC,EAAS,GACTmR,EAAa,KACbJ,EAAQ,KACRrB,EAAS,KACTrW,EAAWF,GAAGE,UA2LpB,OAxLAR,GACKuI,SAAS,GACT+C,eAAc,GAgInByS,EAAOvd,SAASiB,GAAE,2BAA6B,SAASoc,GACpDA,EAAW,QACPvR,IAAKuR,EAAI1B,MACTrQ,MAAO+R,EAAI/R,MACXM,MAAOyR,EAAIzR,OAEfpM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7B+X,EAAOvd,SAASiB,GAAE,0BAA4B,SAASoc,GACnD7d,EAAQgG,QAAO,KAGnB+X,EAAOvd,SAASiB,GAAE,2BAA6B,SAASoc,GACpD7d,MAOJ4C,EAAMmb,OAASA,EACfnb,EAAMpC,SAAWA,EACjBoC,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB0P,QAAczP,IAAK,WAAW,MAAOyP,IAAUxP,IAAK,SAASvG,GAAG+V,EAAO/V,IACvEiW,SAAc3P,IAAK,WAAW,MAAO2P,IAAW1P,IAAK,SAASvG,GAAGiW,EAAQjW,IACzEqW,UAAW/P,IAAK,WAAW,MAAO+P,IAAY9P,IAAK,SAASvG,GAAGqW,EAASrW,IACxEvB,OAAW6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAClEtB,QAAY4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACrE6P,YAAgBvJ,IAAK,WAAW,MAAOuJ,IAActJ,IAAK,SAASvG,GAAG6P,EAAW7P,IACjFyP,OAAWnJ,IAAK,WAAW,MAAOmJ,IAASlJ,IAAK,SAASvG,GAAGyP,EAAMzP,IAClEoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAGrEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D6S,QAASpJ,IAAK,WAAW,MAAOoJ,IAAUnJ,IAAK,SAASvG,GACpD0P,EAAS1P,EACT2O,EAAoB,SAAVe,GAA+B,UAAVA,MAIvCrY,EAAGG,MAAMkW,eAAevT,EAAOmb,GAC/Bje,EAAGG,MAAMqP,YAAY1M,GAEdA,GCnNX9C,EAAGI,OAAO8gB,eAAiB,WACvB,YAoCA,SAASpe,GAAMsB,GA4HX,MA3HAA,GAAUC,KAAK,SAASC,GACpB4C,EAAY1G,GAAG2G,OAAOpG,KACtB,IAAI4E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAElEvF,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAIia,GAAYxb,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAU,GAG1D0F,GAAEnB,OAAOqT,GAAWxZ,GAAGkf,OAAOpb,EAAK,GAAGuE,OAAO4L,IAAIwF,GAAMvY,OAAOke,KAE1DwB,EACAtZ,EAAErB,MAAMyT,IAA4B,GAAjBvU,EAAsBrB,EAAK,GAAGuE,OAAOzG,OAAQuD,GAAkBrB,EAAK,GAAGuE,OAAOzG,OAAS,IAAOkC,EAAK,GAAGuE,OAAOzG,SAEhI0F,EAAErB,MAAMyT,IAAW,EAAIiH,EAAW,EAAGxb,EAAiBwb,EAAW,EAAI,IAEzE5R,EAAE5I,OAAOyT,IACD5Z,GAAGgJ,IAAIlF,EAAK,GAAGuE,OAAO4L,IAAI4M,GAAQ3f,OAAO4f,IACzC9gB,GAAG6I,IAAI/E,EAAK,GAAGuE,OAAO4L,IAAI8M,GAAS7f,OAAO4f,MAEhD7a,MAAM0U,IAAWvV,EAAiB,IAGhCkC,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,KAC7BmB,EAAEnB,SAAS,GACPmB,EAAEnB,QAAQmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,GAAWmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,KACzEmB,EAAEnB,QAAM,GAAK,KAEnB4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,KAC7B4I,EAAE5I,SAAS,GACP4I,EAAE5I,QAAQ4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,GAAW4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,KACzE4I,EAAE5I,QAAM,GAAK,IAGvB,IAAIW,GAAO9G,GAAG2G,OAAOpG,MAAMwG,UAAS,+BAAgCjD,MAAMA,EAAK,GAAGuE,SAC9ErB,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,kCACnD8V,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,YAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEyB,EACKvF,GAAE,QAAU,SAASwG,EAAEnF,GACpBtC,EAAS8gB,YACLld,KAAM6D,EACNiB,MAAOpG,EACPiK,IAAKzM,GAAGuE,MACRwF,GAAIA,MAIhBkT,EAAU/V,OAAM,YACXC,KAAI,KAAO,sBAAwB4C,GACnC7C,OAAM,QAEXJ,EAAKH,OAAM,uBAA0BoD,EAAK,SACrC5C,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpBuS,EAAKxQ,KAAI,YAAc8Z,EAAW,2BAA6BlX,EAAK
,IAAM,GAE1E,IAAI6N,GAAQ9Q,EAAKH,OAAM,aAAcI,UAAS,YACzCjD,KAAK,SAAS6D,GAAK,MAAOA,IAC/BiQ,GAAMhQ,OAAOC,QAEb,IAAIqZ,GAAatJ,EAAM3Q,QAAQC,OAAM,IAGrC0Q,GACKzQ,KAAI,QAAU,SAASQ,EAAGnF,EAAGwY,GAAK,OAAQmG,EAAQxZ,EAAGnF,GAAK4e,EAASzZ,EAAGnF,GAAK,mBAAqB,oBAAsB,YAAcwY,EAAI,IAAMxY,GAEvI0e,GAAWha,OAAM,QACxBC,KAAI,QAAU,wBACdA,KAAI,YAAc,SAASQ,EAAGnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAGnF,IAAM,QACzE2E,KAAI,KAAO,GACXA,KAAI,KAAO,SAASQ,EAAGnF,GAAK,MAAOuM,GAAEgS,EAAQpZ,EAAGnF,MAChD2E,KAAI,KAAO,GACXA,KAAI,KAAO,SAASQ,EAAGnF,GAAK,MAAOuM,GAAE8R,EAAOlZ,EAAGnF,MAExC0e,EAAWha,OAAM,QACxBC,KAAI,QAAU,gCACdA,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,MAAO,cAAgB8E,EAAEmS,EAAK9R,EAAGnF,IAAMme,EAAS,GAAK,KAClD5R,EAAEsS,EAAK1Z,EAAGnF,KAAO2e,EAAQxZ,EAAGnF,GAAK4e,EAASzZ,EAAGnF,GAAMuM,EAAEqS,EAASzZ,EAAGnF,IAAMuM,EAAEoS,EAAQxZ,EAAGnF,IAAO,IAC5F,MAEL2E,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,QAAUwZ,GACdxZ,KAAI,SAAW,SAASQ,EAAGnF,GACxB,GAAI8e,GAAOH,EAAQxZ,EAAGnF,GAClB+e,EAAQH,EAASzZ,EAAGnF,EACxB,OAAO8e,GAAOC,EAAQxS,EAAEwS,GAASxS,EAAEuS,GAAQvS,EAAEuS,GAAQvS,EAAEwS,IAG/D3J,GAAMjR,OAAM,yBAA0BgH,aACjCxG,KAAI,YAAc,SAASQ,EAAGnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAGnF,IAAM,QACzE2E,KAAI,KAAO,GACXA,KAAI,KAAO,SAASQ,EAAGnF,GAAK,MAAOuM,GAAEgS,EAAQpZ,EAAGnF,MAChD2E,KAAI,KAAO,GACXA,KAAI,KAAO,SAASQ,EAAGnF,GAAK,MAAOuM,GAAE8R,EAAOlZ,EAAGnF,MAEpDoV,EAAMjR,OAAM,yBAA0BgH,aACjCxG,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,MAAO,cAAgB8E,EAAEmS,EAAK9R,EAAGnF,IAAMme,EAAS,GAAK,KAClD5R,EAAEsS,EAAK1Z,EAAGnF,KAAO2e,EAAQxZ,EAAGnF,GAAK4e,EAASzZ,EAAGnF,GAAMuM,EAAEqS,EAASzZ,EAAGnF,IAAMuM,EAAEoS,EAAQxZ,EAAGnF,IAAO,IAC5F,MAEL2E,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,QAAUwZ,GACdxZ,KAAI,SAAW,SAASQ,EAAGnF,GACxB,GAAI8e,GAAOH,EAAQxZ,EAAGnF,GAClB+e,EAAQH,EAASzZ,EAAGnF,EACxB,OAAO8e,GAAOC,EAAQxS,EAAEwS,GAASxS,EAAEuS,GAAQvS,EAAEuS,GAAQvS,EAAEwS,OAI5Djf,EA1JX,GAIMoE,GAeA8S,EACAI,EACAF,EACAiB,EAtBF5V,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,KACRC,EAAS,KACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UAErBrD,EAAItH,GAAG8H,MAAMC,SACbgH,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BoS,EAAU,SAASxZ,GAAK,MAAOA,GAAE2Z,MACjCF,EAAW,SAASzZ,GAAK,MAAOA,GAAE4Z,OAClCR,EAAU,SAASpZ,GAAK,MAAOA,GAAE6Z,MACjCX,EAAS,SAASlZ,GAAK,MAAOA,GAAE8Z,KAChCrC,KACA0B,KACAF,GAAc,EACdK,GAAW,EACXnV,EAAQtM,EAAGG,MAAMuQ,eACjBwR,GAAc,EAKdxhB,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,YAAa,aAAc,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAiMhK,OAzDAoC,GAAMqf,eAAiB,SAASC,EAAYC,GACxCvf,EAAMwf,kBACNpb,EAAUC,OAAM,iCAAoCib,GAC/C1X,QAAO,QAAU2X,IAI1Bvf,EAAMwf,gBAAkB,WACpBpb,EAAUC,OAAM,qCACXuD,QAAO,SAAU,IAQ1B5H,EAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAW6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAClEtB,QAAW4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACpEvC,QAAW6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IAC1DuS,QAAWjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IAC1DqR,SAAW/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACtEyR,SAAWnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACtEuR,QAAWjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACpEwS,QAAWlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACpEiX,QAAW3Q,IAAK,WAAW,MAAO2Q,IAAU1Q,IAAK,SAASvG,GAAGiX,EAAOjX,IACpE2Y,QAAWrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACpEyY,SAAWnS,IAAK,WAAW,MAAOmS,IAAWlS,IAAK,SAASvG,GAAGyY,EAAQzY,IACtE8Y,UAAWxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IACxE4B,IAAW0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC5DuZ,aAAcjT,IAAK,WAAW,MAAOiT,IAAehT,IAAK,SAASvG,GAAGuZ,EAAYvZ,IAEjFb,GAAQmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SA
ASvG,GAAGsR,EAAKtR,IAC7D4G,GAAQN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC7DmZ,MAAQ7S,IAAK,WAAW,MAAO0S,MAAazS,IAAK,SAASvG,GAAGgZ,EAAQhZ,IACrEoZ,OAAQ9S,IAAK,WAAW,MAAO2S,MAAc1S,IAAK,SAASvG,GAAGiZ,EAASjZ,IACvEqZ,MAAQ/S,IAAK,WAAW,MAAOsS,IAAWrS,IAAK,SAASvG,GAAG4Y,EAAQ5Y,IACnEsZ,KAAQhT,IAAK,WAAW,MAAOoS,IAAUnS,IAAK,SAASvG,GAAG0Y,EAAO1Y,IAGjEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAqB5B,QAAZ8E,EAAElD,IAAsBkD,EAAElD,IAASF,EAAOE,IAC1DF,EAAOuR,MAAqBjT,QAAZ8E,EAAEmO,MAAsBnO,EAAEmO,MAASvR,EAAOuR,MAC1DvR,EAAOsR,OAAqBhT,QAAZ8E,EAAEkO,OAAsBlO,EAAEkO,OAAStR,EAAOsR,OAC1DtR,EAAOC,KAAqB3B,QAAZ8E,EAAEnD,KAAsBmD,EAAEnD,KAASD,EAAOC,OAE9D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GACdA,GC/NX9C,EAAGI,OAAOmiB,oBAAsB,WAC5B,YAsFA,SAASzf,GAAMsB,GAsbX,MArbAqO,GAAYW,QACZX,EAAYrS,OAAOoiB,GACftF,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAClCjZ,EAAUC,KAAK,SAASC,GA0CpB,QAASme,GAAUta,EAAEnF,GACjBxC,GAAG2G,OAAOrE,EAAMoE,WACXuD,MAAK,SAAW,aAGzB,QAASiY,GAASva,EAAEnF,GAChBoG,EAAMtB,EAAItH,GAAGuE,MAAM+C,EACnBsB,EAAMpG,EAAI0G,KAAK6E,MAAMoU,EAAG/b,OAAOwC,EAAMtB,IACrC8a,IAGJ,QAASC,GAAQ1a,EAAEnF,GACfxC,GAAG2G,OAAOrE,EAAMoE,WACXuD,MAAK,SAAW,QAGrBsH,EAAM3I,MAAQA,EAAMpG,EACpBtC,EAASoiB,YAAY/Q,GAgPzB,QAAS6Q,KACLG,GACKze,MAAM8E,GAIX,IAAI4Z,GAAclgB,EAAM2F,UACxB3F,GAAM2F,SAAS,GACf3F,EAAMqR,SACNrR,EAAM2F,SAASua,GAnTnB,GAAI9b,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,GACjBA,EAAUwD,QAAO,YAAeH,GAAI,EACpC,IAEI5E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAkBlE,IAhBAzC,EAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAK0B,GAEfoE,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAEvDA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAItC,GAAI4W,GAAY5iB,GAAG6iB,SAASC,OACvB3hB,GAAE,YAAc8gB,GAChB9gB,GAAE,OAAS+gB,GACX/gB,GAAE,UAAYkhB,EAwBnB,MAAKve,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CASX,IAPIoE,EAAUK,UAAS,cAAec,SAItCP,EAAI0a,EAAMpc,SACVmJ,EAAIiT,EAAMtH,SAELqI,EAsBDf,EAAMpI,QAAQ,UAtBH,CACX,GAAIoJ,GAAgBlf,EACfmI,OAAO,SAASL,GAAU,OAAQA,EAAOyR,WACzCpJ,IAAI,SAASrI,EAAOpJ,GACjB,GAAIygB,GAAgBjjB,GAAGkf,OAAOtT,EAAOvD,OAAQ2Z,EAAMjT,IAKnD,OAFIkU,GAAc,IAAK,MAAMA,EAAc,IAAK,OAGvCA,EAAc,GAAKA,EAAc,KAAO,EAAIA,EAAc,KAC1DA,EAAc,GAAKA,EAAc,KAAO,EAAIA,EAAc,OAIvEC,GACAljB,GAAGgJ,IAAIga,EAAe,SAASrb,GAAK,MAAOA,GAAE,KAC7C3H,GAAG6I,IAAIma,EAAe,SAASrb,GAAK,MAAOA,GAAE,KAGjDqa,GAAMpI,QAAQsJ,GAKlBf,EAAGhc,QAAQ,EAAGrC,EAAK,GAAGuE,OAAOzG,OAAS,IACjCqE,OAAO,EAAGd,IACV4X,OAAM,EAEX,IAAIjZ,GAAOqf,EAASva,EAAMpG,EAAGsB,GAGzBsf,EAA2B,EAA4B,OAAS,MAChEtc,EAAOJ,EAAUK,UAAS,+BAAgCjD,MAAMA,IAChEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,kCAAkCD,OAAM,KACxFyQ,EAAI7Q,EAAKH,OAAM,IA+BnB,IA7BAqW,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAAgB8C,MAAK,iBAAgB,QACtE+S,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAAgB8C,MAAK,iBAAkBmZ,GACxEpG,EAAO9V,OAAM,KAAMC,KAAI,QAAU,mBAAmB8C,MAAK,iBAAgB,QACzE+S,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,mBAG5Bkc,GAGDC,EAAO1c,MAAMzB,GAEbwS,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE4S,EAAEhR,OAAM,kBACHQ,KAA
I,YAAc,gBAAmBpC,EAAOE,IAAK,MAdtD0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAkBzC2b,EAEE,CACH,GAAIC,KACEzX,IAAK,kBAAmBqR,UAAW0F,GAGzCW,GACK9c,MAAM,KACNkF,OAAK,OAAU,OAAQ,SACvB6X,YAAW,GACX5e,QAAQE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,KAGhD2S,EAAEhR,OAAM,oBACHyW,MAAMqG,GACNtc,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,KACjDrE,KAAK8iB,OAhBT/L,GAAEhR,OAAM,oBAAqBI,UAAS,KAAMc,QAmBjDf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEnEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,MAI3D,IAAIye,GAAe9f,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEic,cAEtD9c,GAAKH,OAAM,iBAAkBkB,SACzB+b,EAAahiB,QACbkF,EAAKI,OAAM,QAASC,KAAI,QAAU,gBAC7BA,KAAI,IAAMhC,EAAiB,GAC3BgC,KAAI,IAAM,UACV8C,MAAK,cAAgB,OACrByH,KAAKkS,EAAa3P,IAAI,SAAStM,GAAK,MAAOA,GAAEqE,MAAOoL,KAAI,MAAS,sDAItEyM,IACAC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAKD,EAAOC,KAAKC,IAAIF,EAAOE,MACpCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAGxC9G,EAAOrW,OAAM,kBACRO,OAAM,QAEXyQ,EAAEhR,OAAM,uBACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpB4c,EAEKjT,EAAE,SAASpH,GAAK,MAAOA,GAAEoc,QAAQhV,IACjCnI,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAAavZ,EAAKtB,GAAGohB,eAEnE,IAAII,GAAYrM,EAAEhR,OAAM,iBACnByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAASA,EAAE0V,WAAa1V,EAAEic,eAE/DI,GAAUpjB,KAAKohB,GAGfle,EAAK2F,QAAQ,SAAS9B,EAAEnF,GACpBmF,EAAEsc,YAAczhB,GAGpB,IAAI0hB,GAAcpgB,EAAKmI,OAAO,SAAStE,GACnC,OAAQA,EAAE0V,YAAc8G,EAAQxc,KAGhCyc,EAAWzM,EAAEhR,OAAM,oBAAqBI,UAAS,QAChDjD,KAAKogB,EAAa,SAASvc,GAAK,MAAOA,GAAEqE,MAE1CqY,GAAc,SAAS1c,GAEvB,GAAI2c,GAAOvV,EAAEoV,EAAQxc,GACrB,OAAW,GAAP2c,EAAiB,EACjBA,EAAOlf,EAAwBA,EAC5Bkf,EAGXF,GAASnd,QACJC,OAAM,QACN+C,MAAK,eAAgB,GACrBA,MAAK,mBAAkB,SACvBA,MAAK,SAAU,SAAUtC,EAAEnF,GACxB,MAAOwf,GAAMlW,QAAQnE,EAAEA,EAAEsc,eAE5B9c,KAAI,KAAM,GACVA,KAAI,KAAMhC,GACVgC,KAAI,KAAOkd,IACXld,KAAI,KAAOkd,IAEhBD,EACKna,MAAK,iBAAkB,SAAStC,GAE7B,GAAI2c,GAAOvV,EAAEoV,EAAQxc,GACrB,OAAW,GAAP2c,GAAYA,EAAOlf,EAAwB,EACxC,IAEV+B,KAAI,KAAM,GACVA,KAAI,KAAMhC,GACVgC,KAAI,KAAOkd,IACXld,KAAI,KAAOkd,IAEhBD,EAASxc,OAAOC,QAGhB,IAAI0a,IAAYyB,EAAUjd,UAAS,iBAC9BjD,MAAM8E,GACX2Z,IAAUtb,QAAQC,OAAM,QAASC,KAAI,QAAU,gBAC1CA,KAAI,QAAU,GACdA,KAAI,IAAM,IACVA,KAAI,OAAS,OACbA,KAAI,eAAiB,IACrB8C,MAAK,iBAAgB,OACrBrJ,KAAKgiB,GAEVL,GACKpb,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAewa,EAAGxa,EAAEnF,GAAK,QAChE2E,KAAI,SAAW/B,GAGhBsX,IACAC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,GAAIrB,IAC/CwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KACvD0R,EAAEhR,OAAM,iBACH/F,KAAK+b,IAGVC,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBACH/F,KAAKic,IAmBdlF,EAAEhR,OAAM,uBACHxF,GAAE,QAAU,WACTyH,EAAMtB,EAAItH,GAAGiE,MAAM1D,MAAM,GACzBqI,EAAMpG,EAAI0G,KAAK6E,MAAMoU,EAAG/b,OAAOwC,EAAMtB,IAGrCiK,EAAM3I,MAAQA,EAAMpG,EACpBtC,EAASoiB,YAAY/Q,GAErB6Q,MAGRJ,EAAM9hB,SAASiB,GAAE,eAAiB,SAASC,GACvCwH,EAAMpG,EAAIpB,EAAEwgB,WACZhZ,EAAMtB,EAAI6a,EAAGvZ,EAAMpG,GAGnB+O,EAAM3I,MAAQA,EAAMpG,EACpBtC,EAASoiB,YAAY/Q,GAErB6Q,MAGJsB,EAASxjB,SAASiB,GAAE,cAAgB,SAASwG,EAAEnF,GAC3CmF,EAAE0V,UAAY1V,EAAE0V,SAChB0F,GAAYpb,EAAE0V,SAEd9L,EAAMwR,SAAWA,EACjB7iB,EAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGV2P,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGVmQ,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtD4gB,EAAMF,iBACN,IAAI2C,GAAa7C,EAAY8C,EAAgBC,IAsB7C,IApBA7gB,EACKmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAElB5T,QAAQ,SAASmC,EAAOpJ,GACrBof,EAAapiB,EAAG4I,kBAAkBwD,EAAOvD,OAAQjH,EAAE0E
,YAAaxD,EAAMgF,KACtE0a,EAAML,eAAenf,EAAGof,GAAY,EACpC,IAAI9S,GAAQlD,EAAOvD,OAAOuZ,EACL,oBAAV9S,KACgB,mBAAhB2V,KAA6BA,EAAc3V,GACxB,mBAAnB4V,KAAgCA,EAAiBpiB,EAAMsD,SAAStD,EAAMgF,IAAIwH,EAAM8S,KAC3F+C,EAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOlJ,EAAMyM,IAAID,EAAO8S,GACxB9V,MAAOA,EAAMF,EAAOA,EAAOqY,kBAKnCU,EAAQ/iB,OAAS,EAAG,CACpB,GAAIgjB,GAAStiB,EAAMoY,SAAStU,OAAOhF,EAAE+C,QACjC0gB,EAAe3b,KAAKC,IAAI7G,EAAMoY,SAASvU,SAAS,GAAK7D,EAAMoY,SAASvU,SAAS,IAC7EkD,EAAY,IAAOwb,EACnBrb,EAAmBhK,EAAG4J,kBAAkBub,EAAQ1Q,IAAI,SAAStM,GAAG,MAAOA,GAAE6D,QAAQoZ,EAAOvb,EACnE,QAArBG,IACAmb,EAAQnb,GAAkBqC,WAAY,GAG9C,GAAIiZ,GAASnI,EAAM3E,aAAa1V,EAAMgF,IAAImd,EAAY7C,GAAaA,EACnEkC,GAAiBpkB,QACZuL,eAAe,SAAStD,EAAEnF,GACvB,MAAOqa,GAAM7E,aAAarQ,KAE7B7D,MAEG0H,MAAOsZ,EACPlZ,OAAQ+Y,MAIhBb,EAAiBre,gBAAgBif,KAGrCZ,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpD4gB,EAAMF,oBAIV5hB,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAGjC+O,EAAM8L,SAAWjc,EAAEic,UAGA,mBAAZjc,GAAEwH,QACTA,EAAMpG,EAAIpB,EAAEwH,MACZA,EAAMtB,EAAI6a,EAAGvZ,EAAMpG,GAEnB+O,EAAM3I,MAAQxH,EAAEwH,MAEhB2Z,GACKze,MAAM8E,KAGW,mBAAfxH,GAAE2hB,WACTA,EAAW3hB,EAAE2hB,UAGjBzgB,EAAMqR,aAKd1B,EAAYS,UAAS,iCAEdpQ,EA2BX,QAAS6gB,GAAS4B,EAAKjhB,GAEnB,MADKkhB,KAAiBA,EAAkBhD,EAAMjT,KACvCjL,EAAKmQ,IAAI,SAASzM,EAAMhF,GAC3B,IAAKgF,EAAKa,OACN,MAAOb,EAEX,IAAIyd,GAAazd,EAAKa,OAAO0c,EAC7B,IAAkB,MAAdE,EACA,MAAOzd,EAEX,IAAIkB,GAAIsc,EAAgBC,EAAYF,EAGpC,QAAQ,IAAJrc,IAAawc,GAGb1d,EAAKoc,cAAe,EACbpc,IAGXA,EAAKoc,cAAe,EAEpBpc,EAAKa,OAASb,EAAKa,OAAO4L,IAAI,SAASnF,EAAO8S,GAE1C,MADA9S,GAAMiV,SAAUhV,GAAOiW,EAAgBlW,EAAO8S,GAAclZ,IAAM,EAAIA,IAC/DoG,IAGJtH,KA5jBf,GAqBMF,GACAyH,EAtBFiT,EAAQxiB,EAAGI,OAAO4H,OAChBmV,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClB4L,EAAS9jB,EAAGI,OAAO0jB,SACnBI,EAAWlkB,EAAGI,OAAO0jB,SACrBQ,EAAmBtkB,EAAGkE,uBACtBhE,EAAUF,EAAGI,OAAOF,UAGtBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZzX,EAAQtM,EAAGG,MAAMuQ,eACjBtJ,EAAQ,KACRC,EAAS,KACTwc,GAAa,EACb3G,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClBsG,GAAe,EACfK,GAA0B,EAC1Bd,GAAW,EAGXhZ,EAAKiY,EAAMjY,KACXwH,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACT4N,EAAU,SAASxc,GAAK,MAAOA,GAAEwc,SACjCjkB,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,aAErD+H,EAAW,IACXid,GAAe,CAGrB3T,GAAM3I,MAAQ,EACd2I,EAAMwR,SAAWA,EAEjBpG,EAAM9E,OAAM,UAAWW,YAAY,GACnCqE,EAAMhF,OAAM,EAAqB,QAAU,QAE3CnY,EAAQuL,eAAe,SAAStD,EAAGnF,GAC/B,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAC9B0I,gBAAgB,SAASvD,EAAGnF,GAC3B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGjCkhB,EAASyB,aAAY,EAMrB,IAAIhD,GAAKniB,GAAG8H,MAAMC,SACZa,GAASpG,EAAG,EAAG8E,EAAG,GAClB2K,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,GAG/Cya,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,WACzCzU,MAAOA,EAAMpG,EACbugB,SAAUA,KAKlBN,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACQlO,SAAhBkO,EAAM3I,QACNA,EAAMpG,EAAI+O,EAAM3I,OACGvF,SAAnBkO,EAAMwR,WACNA,EAAWxR,EAAMwR,UACA1f,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MAkchDwf,GAAM9hB,SAASiB,GAAE,2BAA6B,SAASoc,GACnD,GAAIzO,IACAxH,EAAGhF,EAAMgF,IAAIiW,EAAIzO,OACjBC,EAAGzM,EAAMyM,IAAIwO,EAAIzO,OACjBhD,MAAOyR,EAAIzO,MAAMhD,MAErByR,GAAIzO,MAAQA,EACZpP,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7Bsc,EAAM9hB,SAASiB,GAAE,0BAA4B,SAASoc,GAClD7d,EAAQgG,QAAO,IAOnB,IAAIsf,GAAkB,IAqGtB,OA/DA1iB,GAAMpC,SAAWA,EACjBoC,EAAM0f,MAAQA,EACd1f,EAAMghB,OAASA,EACfhhB,EAAMohB,SAAWA,EACjBphB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAMwhB,iBAAmBA,EACzBxhB,EAAMiP,MAAQA,EACdjP,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa
4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtE4a,UAAetU,IAAK,WAAW,MAAOsU,IAAYrU,IAAK,SAASvG,GAAG4a,EAAS5a,IAC5Eqb,cAAmB/U,IAAK,WAAW,MAAO+U,IAAgB9U,IAAK,SAASvG,GAAGqb,EAAarb,IACxFkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9Egc,SAAU1V,IAAK,WAAW,MAAO0V,IAAWzV,IAAK,SAASvG,GAAGgc,EAAQhc,IACrEwa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrEuU,WAAejO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAC9EyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9E+c,cAAkBzW,IAAK,WAAW,MAAOyW,IAAgBxW,IAAK,SAASvG,GAAG+c,EAAa/c,IAGvFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,KAEjB+X,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,EAA0B1b,EACtBA,KAAM,IACN7F,EAAMof,aAAY,GAClBpf,EAAM8iB,YAAW,MAGzBlI,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQ,EAAM,QAAU,UAElC5P,UAAcwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GAC3DF,EAAWE,EACX6Z,EAAM/Z,SAASA,GACf0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,GACfgK,EAAYW,MAAM3K,OAI1BzI,EAAGG,MAAMkW,eAAevT,EAAO0f,GAC/BxiB,EAAGG,MAAMqP,YAAY1M,GAEdA,GC3oBX9C,EAAGI,OAAOylB,YAAc,WACpB,YAmCA,SAAS/iB,GAAMsB,GAyKX,MAxKAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,GAGjB5C,EAAK2F,QAAQ,SAASmC,EAAQpJ,GAC1BoJ,EAAOvD,OAAOoB,QAAQ,SAASqF,GAC3BA,EAAMlD,OAASpJ,KAMvB,IAAI8iB,GAAc9L,GAAWI,KACzB9V,EAAKmQ,IAAI,SAAStM,GACd,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,GAAI+iB,GAAI5d,EAAE4d,OAIvDje,GAAKnB,OAAOqT,GAAWxZ,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAAK,MAAOA,GAAEL,KAClEzB,WAAW6T,IAAW,EAAGvU,GAAiB,IAC/C4J,EAAK5I,OAAOyT,GAAW5Z,GAAGkf,OAAOlf,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAAK,MAAOA,GAAEoH,IAAK7N,OAAO4f,KAGzF0E,EAAYzW,EAAE9I,MAAM0U,IAAWvV,GAAmB2J,EAAE5I,SAAS,GAAK,EAAI,GAAK,GAAI4I,EAAE5I,SAAS,GAAK,EAAI,GAAK,IACvG4I,EAAE9I,MAAM0U,IAAWvV,EAAiB,IAGzC6a,EAAKA,GAAM3Y,EACXie,EAAKA,GAAMxW,EAAEuK,OAAOrT,OAAO8I,EAAE,GAAGA,EAAE,IAGlC,IAAIjI,GAAOJ,EAAUK,UAAS,4BAA6BjD,MAAMA,IAC7DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,+BACnD6V,EAAShW,EAAUE,OAAM,IACrBJ,GAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAGvE,IAAIwgB,GAAS3e,EAAKH,OAAM,cAAeI,UAAS,aAC3CjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,GAAK,MAAOA,GAAEqE,KAC3DyZ,GAAOxe,QAAQC,OAAM,KAChB+C,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,MAC3Bwb,EAAO7d,OACFoK,gBAAgBC,EAAa,4BAC7BhI,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,MACtBpC,SACL4d,EACKte,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,sBAAwBA,IAC7D0H,QAAO,QAAU,SAASvC,GAAK,MAAOA,GAAEsT,QAC7CwK,EACKzT,gBAAgBC,EAAa,uBAC7BhI,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,IAE3B,IAAIyb,GAAOD,EAAO1e,UAAS,YACtBjD,KAAK,SAAS6D,GAAK,MAAOA,GAAEU,QACjCqd,GAAK9d,OAAOC,QAEZ,IAAI8d,GAAYD,EAAKze,QAAQC,OAAM,KAC9BC,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAC5B,MAAO,cAAgB1T,EAAEmS,EAAK9R,EAAEnF,IAAsB,IAAhB8E,EAAEpB,aAAsB,KAAO6I,EAAE,GAAK,MAE/E5N,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL1X,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MA
AM2J,QAAO,SAAU,GACjChK,EAASsF,iBACL1B,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBACLvC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,QAAU,SAASwG,EAAEnF,GACpB,GAAIojB,GAAUrlB,IACdL,GAASqG,cACLzC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5B1F,MAAOvE,GAAGuE,MACVqhB,QAASA,IAEb5lB,GAAGuE,MAAMshB,oBAEZ1kB,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAAS4lB,iBACLhiB,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,UAEhCjK,GAAGuE,MAAMshB,mBAGjBF,GAAUze,OAAM,QACXC,KAAI,SAAW,GACfA,KAAI,QAA0B,GAAhBG,EAAEpB,YAAmBpC,EAAKlC,QAEzC4jB,GACAG,EAAUze,OAAM,QACXC,KAAI,cAAgB,UAGzBue,EAAK/e,OAAM,QACN+K,KAAK,SAAS/J,EAAEnF,GAAK,MAAOujB,GAAY1E,EAAK1Z,EAAEnF,MAC/CwP,gBAAgBC,EAAa,0BAC7B9K,KAAI,IAAsB,GAAhBG,EAAEpB,YAAmB,GAC/BiB,KAAI,IAAM,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAIuM,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,GAAK,GAAK,MAIjF2W,EAAK3e,UAAS,QAASc,SAG3B6d,EACKve,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,kBAAoB,oBACzEyH,MAAK,OAAS,SAAStC,EAAEnF,GAAK,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAEnF,KACxDyH,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAEnF,KAC1DmE,OAAM,QACNQ,KAAI,QAAU6e,GACdhU,gBAAgBC,EAAa,0BAC7B9K,KAAI,QAA0B,GAAhBG,EAAEpB,YAAmBpC,EAAKlC,QAC7C8jB,EAAK1T,gBAAgBC,EAAa,qBAE7B9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,GAAIwC,GAAOsC,EAAEmS,EAAK9R,EAAEnF,IAAsB,IAAhB8E,EAAEpB,YACxBjB,EAAMoc,EAAK1Z,EAAEnF,GAAK,EACduM,EAAE,GACEA,EAAE,GAAKA,EAAEsS,EAAK1Z,EAAEnF,IAAM,EAC1BuM,EAAE,GAAK,EACPA,EAAEsS,EAAK1Z,EAAEnF,GAEjB,OAAO,aAAewC,EAAO,KAAOC,EAAM,MAE7C0B,OAAM,QACNQ,KAAI,SAAW,SAASQ,EAAEnF,GACvB,MAAQ0G,MAAKL,IAAIK,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAK,KAKxDkR,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,SAIXrH,EAAYS,UAAS,yBACdpQ,EAtMX,GAIMoE,GASA8S,EACAI,EACAF,EACAiB,EAUFsF,EAAIsF,EA1BJxgB,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UAErBrD,EAAItH,GAAG8H,MAAMsI,UACbrB,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9B+R,GAAU,GACVhV,EAAQtM,EAAGG,MAAMuQ,eACjBsV,GAAa,EACbO,EAAc/lB,GAAGmM,OAAM,QAKvBjM,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAAoB,aACnI8lB,EAAY,cACZ/d,EAAW,IAQbgK,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EAyNjD,OAvCA3F,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAU6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACjEtB,QAAU4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACnE2Y,QAAUrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y;AZ1N3E,GAAG,AY2NKqd,CZ3NJ,EAAE,CAAC,CAAC,CAAC,GAAG,GY2NS/W,IAAK,WAAW,MAAO+W,IAAc9W,IAAK,SAASvG,GAAGqd,EAAWrd,IAC9Eb,GAAUmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAC/D4G,GAAUN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC/DvC,QAAU6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IACzDuS,QAAUjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IACzDqR,SAAU/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACrEyR,SAAUnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACrEuR,QAAUjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACnEwS,QAAUlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACnE4d,aAAiBtX,IAAK,WAAW,MAAOsX,IAAerX,IAAK,SAASvG,GAAG4d,EAAY5d,IACpF4B,IAAc0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC/D6d,WAAYvX,IAAK,WAAW,MAAOuX,IAAatX,IAAK,SAASvG,GAAG6d,EAAU7d,IAG3EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MA
ASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9BF,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,OAI1BzI,EAAGG,MAAMqP,YAAY1M,GAEdA,GC3PX9C,EAAGI,OAAOqmB,iBAAmB,WACzB,YA0DA,SAAS3jB,GAAMsB,GA6IX,MA5IAqO,GAAYW,QACZX,EAAYrS,OAAOsmB,GACfxJ,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EASlE,IAPAzC,EAAMqR,OAAS,WACXzT,EAAS4c,eACTpW,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAEnDA,EAAMoE,UAAYnG,OAGbuD,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAItCP,EAAI4e,EAAYtgB,SAChBmJ,EAAImX,EAAYxL,SAASqC,OAAM,EAG/B,IAAIjW,GAAOJ,EAAUK,UAAS,oCAAqCjD,MAAMA,IACrEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,uCAAuCD,OAAM,KAC7F+V,EAAYD,EAAO9V,OAAM,QACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAC5BD,OAAM,KAAMC,KAAI,QAAU,eAC1BD,OAAM,QAEX8V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACxC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAE1BwQ,EAAExQ,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAG/Doe,GAGDC,EAAO1c,MAAMzB,GAEbwS,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE+B,EAAKH,OAAM,kBACNQ,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,MAdtD0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAiB1CqV,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAI3D+gB,EACKtf,MAAMzB,GACN0B,OAAOzB,EAEZ,IAAI+X,GAAWxF,EAAEhR,OAAM,gBAClByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAe/C,IAbAF,EAASxP,aAAa/M,KAAKslB,GAG3BjJ,EAAU/V,OAAM,YACXC,KAAI,KAAO,mBAAqB+e,EAAYnc,MAC5C7C,OAAM,QAEXyQ,EAAEhR,OAAM,oBAAuBuf,EAAYnc,KAAO,SAC7C5C,KAAI,QAAUG,EAAEpB,aAAegT,EAAgB,EAAI,IACnD/R,KAAI,SAAW,IACfA,KAAI,KAAOG,EAAEpB,aAAegT,EAAgB,EAAI,IAGjDwD,EAAW,CACXC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,gBAAkB4H,EAAE9I,QAAQ,IAAOigB,EAAYV,cAAgBzW,EAAE5I,SAAS,GAAK,EAAK,GAAK,IAAM,KACtHwR,EAAEhR,OAAM,iBAAkB/F,KAAK+b,EAE/B,IAAI/D,GAASjB,EAAEhR,OAAM,iBAAkBI,UAAS,IAC5CmS,IACAN,EACK7R,UAAS,QACTI,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAAK,MAAO,gBAAkBA,EAAI,GAAK,EAAI,IAAM,MAAQ,MAG7FlC,GACAF,EACK7R,UAAS,cACTI,KAAI,YAAc,UAAY2R,EAAe,SAC7C7O,MAAK,cAAgB6O,EAAe,EAAI,QAAU,OAGvDqN,GACAxO,EAAE5Q,UAAS,cACNnG,KAAKpB,EAAGG,MAAM+W,UAAWpU,EAAMqa,MAAMzW,aAI9C0W,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBAAkB/F,KAAKic,IAInClF,EAAEhR,OAAM,qBACHQ,KAAI,KAAM,GACVA,KAAI,KAAI,GAAuBhC,EAAiBA,GAChDgC,KAAI,KAAO4H,EAAE,IACb5H,KAAI,KAAO4H,EAAE,MAItBkD,EAAYS,UAAS,+BACdpQ,EAjMX,GAmBMgF,GACAyH,EApBFmX,EAAc1mB,EAAGI,OAAOylB,cACtB1I,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OACzB4L,EAAS9jB,EAAGI,OAAO0jB,SACZ5jB,EAAUF,EAAGI,OAAOF,UAGtBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMsQ,WACxBoT,GAAa,EACN3G,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClBhE,GAAgB,EAChBiN,GAAa,EACbrN,EAAe,EAGfvC,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,eAAc,aACpC+H,EAAW,GAGjB0U,GACK9E,OAAM,UACNU,YAAW,GACXP,WAAW,SAASrQ,GAAK,MAAOA,KAErCkV,EACKhF,OAAM,EAAqB,QAAU,QACrCG,WAAWhY,GAAGmM,OAAM,SAGzBzM,EACKuI,SAAS,GACT+C,eAAc,GACdC,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,
aAAarQ,EAAGnF,KAEhC2I,aAAa,SAASxD,EAAGnF,GACtB,MAAOma,GAAM3E,aAAarQ,EAAGnF,IAOrC,IAAIyP,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA+NjD,OAzEAie,GAAYhmB,SAASiB,GAAE,2BAA6B,SAASoc,GACzDA,EAAW,QACPvR,IAAK1J,EAAMgF,IAAIiW,EAAIzZ,MACnB0H,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,OAEfpM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7BwgB,EAAYhmB,SAASiB,GAAE,0BAA4B,SAASoc,GACxD7d,EAAQgG,QAAO,KAGnBwgB,EAAYhmB,SAASiB,GAAE,2BAA6B,SAASoc,GACzD7d,MAOJ4C,EAAMpC,SAAWA,EACjBoC,EAAM4jB,YAAcA,EACpB5jB,EAAMghB,OAASA,EACfhhB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC7Ekb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IACvE+Q,eAAgBzK,IAAK,WAAW,MAAOyK,IAAiBxK,IAAK,SAASvG,GAAG+Q,EAAc/Q,IACvF2Q,cAAgBrK,IAAK,WAAW,MAAOqK,IAAgBpK,IAAK,SAASvG,GAAG2Q,EAAa3Q,IACrFge,YAAc1X,IAAK,WAAW,MAAO0X,IAAczX,IAAK,SAASvG,GAAGge,IAAahe,IACjFuU,WAAYjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAC3EyU,WAAYnO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC3EoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAGrEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBie,EAAYje,SAASA,GACrB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1B+d,EAAYpa,MAAMA,GACzBwX,EAAOxX,MAAMA,KAEVoR,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQ,EAAM,QAAU,YAItCrY,EAAGG,MAAMkW,eAAevT,EAAO4jB,GAC/B1mB,EAAGG,MAAMqP,YAAY1M,GAEdA,GCxRX9C,EAAGI,OAAOwmB,aAAe,WACrB,YA8BA,SAAS9jB,GAAMsB,GA8DX,MA7DAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GACIuiB,IADkBzf,GAAkB,MAAT8Q,EAAe3S,EAAOC,KAAOD,EAAOuR,MAAQvR,EAAOE,IAAMF,EAAOsR,QAC3E,KAARqB,EAAc,IAAM,KAC5BhR,EAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,GAKjBoR,EAASA,GAAUhQ,CAQnB,IAAIhB,GAAOJ,EAAUK,UAAS,qBAAsBjD,MAAMA,IACtDkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,wBAEnDwQ,GADS3Q,EAAUE,OAAM,KACrBJ,EAAKH,OAAM,KAEnBG,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAKvE,IAAIqhB,GAAW3O,EAAE5Q,UAAS,aACrBjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,GAAK,MAAOA,GAAEqE,KAE3Dsa,GAASrf,QAAQC,OAAM,KACvBof,EACKnf,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,qBAAuBA,IAC5DyH,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOsJ,GAAMnE,EAAGnF,IAErD,IAAI+jB,GAAOD,EAASvf,UAAS,eAAkB2Q,GAC1C5T,KAAK,SAAS6D,GAAK,MAAOA,GAAEU,QACjCke,GAAKtf,QAAQC,OAAM,QACdC,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsV,GAAO0O,EAAQ7e,EAAEnF,MACzD2E,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsV,GAAO0O,EAAQ7e,EAAEnF,MAC9DyP,EAAYtE,WAAW2Y,EAAS1e,OAAOb,UAAS,eAAkB2Q,GAAO,aAEpEvQ,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsF,GAAM0e,EAAQ7e,EAAEnF,MACxD2E,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsF,GAAM0e,EAAQ7e,EAAEnF,MACxDyH,MAAK,iBAAmB,GACxBpC,SACL0e,EACKpf,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,UAAYkV,EAAO,WAAaA,EAAO,IAAMlV,IAClF2E,KAAKkf,EAAQ,IAAK,GAClBlf,KAAKkf,EAAQ,IAAKnX,GACvB+C,EAAYtE,WAAW4Y,EAAM,QAExBpf,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsF,GAAM0e,EAAQ7e,EAAEnF,MACxD2E,KAAKuQ,EAAO,IAAK,SAAS/P,EAAEnF,GAAK,MAAOsF,GAAM0e,EAAQ7e,EAAEnF,MAG7DsV,EAAShQ,EAAMwR,SAGnBrH,EAAYS,UAAS,0BACdpQ,EAvFX,GAmBIwV,GAnBA/S,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,O
AAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRsI,EAAO,EACPwI,EAAO,IACP8O,EAAU,SAAS7e,GAAK,MAAOA,GAAE+P,IACjC5L,EAAQtM,EAAGG,MAAMuQ,eACjBpI,EAAQ9H,GAAG8H,MAAMC,SAEjBE,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,aAWxB+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EAmIjD,OAzDA3F,GAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAC1CA,EAAMpC,SAAWA,EAEjBoC,EAAMyC,OAAS,SAASoD,GACpB,MAAKtH,WAAUe,QACfmD,EAAOE,IAA4B,mBAAZkD,GAAElD,IAAwBkD,EAAElD,IAASF,EAAOE,IACnEF,EAAOuR,MAA4B,mBAAZnO,GAAEmO,MAAwBnO,EAAEmO,MAASvR,EAAOuR,MACnEvR,EAAOsR,OAA4B,mBAAZlO,GAAEkO,OAAwBlO,EAAEkO,OAAStR,EAAOsR,OACnEtR,EAAOC,KAA4B,mBAAZmD,GAAEnD,KAAwBmD,EAAEnD,KAASD,EAAOC,KAC5D1C,GALuByC,GAQlCzC,EAAMsE,MAAQ,SAASuB,GACnB,MAAKtH,WAAUe,QACfgF,EAAQuB,EACD7F,GAFuBsE,GAKlCtE,EAAMoV,KAAO,SAASvP,GAClB,MAAKtH,WAAUe,QACf8V,EAAOvP,EACA7F,GAFuBoV,GAKlCpV,EAAM4M,KAAO,SAAS/G,GAClB,MAAKtH,WAAUe,QACfsN,EAAO/G,EACA7F,GAFuB4M,GAKlC5M,EAAMkkB,QAAU,SAASre,GACrB,MAAKtH,WAAUe,QACf4kB,EAAUxmB,GAAG4V,QAAQzN,GACd7F,GAFuBkkB,GAKlClkB,EAAMwF,MAAQ,SAASK,GACnB,MAAKtH,WAAUe,QACfkG,EAAQK,EACD7F,GAFuBwF,GAKlCxF,EAAMwJ,MAAQ,SAAS3D,GACnB,MAAKtH,WAAUe,QACfkK,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GACnB7F,GAFuBwJ,GAKlCxJ,EAAM2F,SAAW,SAASE,GACtB,MAAKtH,WAAUe,QACfqG,EAAWE,EACX8J,EAAYW,MAAM3K,GACX3F,GAHuB2F,GAQ3B3F,GC9JX9C,EAAGI,OAAO6mB,MAAQ,SAAS5V,GACvB,YAqCA,SAASvO,GAAMsB,GAuLX,MAtLAqO,GAAYW,QACZX,EAAYrS,OAAOiR,GACf6L,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GAoIpB,QAAS4iB,GAAW/e,GAChB,GAAIvG,KAAW,KAALuG,GACNL,EAAIlG,EAAI,EAAI,GACZ2N,EAAI3J,EAAkB,CAC1B,OAAO,IAAO,GAAMkC,EAAK,IAAMyH,EACzB,YAAc3N,EAAI,IAAO,IAAMkG,EAAK,KAAOyH,EAAI,GAC/C,KAAO,EAAIA,EAAI,GACf,YAAc3N,EAAI,IAAO,GAAMkG,EAAK,IAAO,EAAIyH,EAC/C,KACO,IAAMzH,EAAK,KAAOyH,EAAI,GAC7B,KAAO,EAAIA,EAAI,GACf,IAAO,IAAMzH,EAAK,KAAOyH,EAAI,GAC7B,KAAO,EAAIA,EAAI,GAIzB,QAAS4X,KACAC,EAAMC,SAASD,EAAM1H,OAAO4H,GACjCC,EACKjjB,MAAM8iB,EAAMC,QAAUvf,EAAEnB,SAAW2gB,IACnCjjB,KAAK,SAAS8D,EAAEnF,GACb,GAAIwkB,GAAY1f,EAAEK,EAAE,IAAML,EAAErB,QAAQ,GAChCghB,EAAa9hB,EAAiBmC,EAAEK,EAAE,GACtC3H,IAAG2G,OAAOpG,MAAMoG,OAAM,SACjBQ,KAAI,QAAuB,EAAZ6f,EAAgB,EAAIA,GAExChnB,GAAG2G,OAAOpG,MAAMoG,OAAM,UACjBQ,KAAI,IAAMG,EAAEK,EAAE,KACdR,KAAI,QAAuB,EAAb8f,EAAiB,EAAIA,KAKpD,QAASC,GAAQC,GACbL,EAAcF,EAAMC,QAAU,KAAOD,EAAM1H,QAC3C,IAAIA,GAAS0H,EAAMC,QAAUvf,EAAEnB,SAAWygB,EAAM1H,QAChDhf,GAAS0mB,OAAO1H,OAAQA,EAAQ0H,MAAOA,IACvCD,IACIQ,GACAjnB,EAASgnB,QAAQhI,GA1KzB,GAAIxY,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD/T,GAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAM0B,GAEhBoE,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAGvDA,EAAMoE,UAAYnG,KAGlB+G,EAAIuJ,EAAQjL,SACZmJ,EAAI8B,EAAQ6J,QAGZ,IAAI5T,GAAOJ,EAAUK,UAAS,cAAejD,MAAMA,IAC/CkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,iBAAiBD,OAAM,KACvEyQ,EAAI7Q,EAAKH,OAAM,IAEnBG,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvE+X,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAAiBD,OAAM,QACxD8V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,sBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAE7B+V,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAG3DwS,EAAEhR,OAAM,uBACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpByL,EACKjK,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAE9C,IAAI+J,GAAczP,EAAEhR,OAAM,mBACrByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAE/Crd,IAAG2N,WAAWyZ,GAAaxmB,KAAKiQ,GAGhC+V,EACKtf,EAAEA,GACFnG,GAAE,QAAU,WACT+lB,EAAQG,KAGhBT,EAAMzlB,GAAE,WAAa,WACZkmB,GACDnnB,EAASgnB,QAAQN,EA
AMC,QAAUvf,EAAEnB,SAAWygB,EAAM1H,YAIxD4H,GAAaF,EAAM1H,OAAO4H,EAE9B,IAAIC,GAAUpP,EAAEhR,OAAM,uBAAwBI,UAAS,KAClDjD,MAAMgjB,GAAeF,EAAM1H,WAE5BoI,EAAeP,EAAQ9f,QACtBC,OAAM,IAEXogB,GAAapgB,OAAM,QACdC,KAAI,QAAU,QACdA,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,SAAW/B,GAEpBkiB,EAAapgB,OAAM,QACdC,KAAI,QAAU,SACdA,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,SAAW/B,EAEpB,IAAImiB,GAAS5P,EAAEhR,OAAM,kBAChB/F,KAAKgmB,EACVW,GAAOxgB,UAAS,QACXI,KAAI,SAAW/B,GACpBmiB,EAAOxgB,UAAS,WAAYG,OAAM,QAASC,KAAI,IAAMuf,GAErDQ,GAAQ,GAERvP,EAAEhR,OAAM,uBACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEhBsX,IACAC,EAAM7U,MAAMR,GACPid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KACvDjG,GAAG2N,WAAWgK,EAAEhR,OAAM,kBACjB/F,KAAK+b,IAGVC,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCnF,GAAG2N,WAAWgK,EAAEhR,OAAM,kBACjB/F,KAAKic,IAGdlF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,OAuD3DgM,EAAYS,UAAS,mBACdpQ,EAtNX,GAcMgF,GACAyH,EAfF8B,EAAUA,GAAWrR,EAAGI,OAAO4H,OAC7BmV,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClBkP,EAAQ5mB,GAAG0V,IAAIkR,QAGjB7hB,GAAUE,IAAK,GAAIqR,MAAO,EAAGD,OAAQ,GAAIrR,KAAM,GAC7C8G,EAAQtM,EAAGG,MAAMuQ,eACjBtJ,EAAQ,KACRC,EAAS,GACT6V,GAAY,EACZE,GAAY,EACZM,GAAkB,EAIlB4J,EAAc,KACd7e,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,QAAU,UAAW,aAC3CmnB,GAAe,CAGrBxW,GAAQ6Q,aAAY,GACpB7Q,EAAQ2W,YAAY,SAAS7f,GAAK,OAAO,GAMzC,IAAIsK,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EAgQjD,OA1DA3F,GAAMpC,SAAWA,EACjBoC,EAAMuO,QAAUA,EAChBvO,EAAMskB,MAAQA,EACdtkB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEuU,WAAiBjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAChFyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9E2e,aAAcrY,IAAK,WAAW,MAAOqY,IAAepY,IAAK,SAASvG,GAAG2e,EAAY3e,IACjFkf,cAAe5Y,IAAK,WAAW,MAAO4Y,IAAgB3Y,IAAK,SAASvG,GAAGkf,EAAalf,IAGpFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClB4I,EAAQ5I,SAASA,GACjB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1B0I,EAAQ/E,MAAMA,KAElB2b,aAAchZ,IAAK,WAAW,MAAOoC,GAAQ4W,eAAiB/Y,IAAK,SAASvG,GACxE0I,EAAQ4W,YAAYtf,KAExBuf,aAAcjZ,IAAK,WAAW,MAAOkO,GAAM3E,cAAgBtJ,IAAK,SAASvG,GACrEwU,EAAM3E,WAAW7P,KAErBwf,aAAclZ,IAAK,WAAW,MAAOoO,GAAM7E,cAAgBtJ,IAAK,SAASvG,GACrE0U,EAAM7E,WAAW7P,KAErBb,GAAImH,IAAK,WAAW,MAAOoC,GAAQvJ,KAAOoH,IAAK,SAASvG,GACpD0I,EAAQvJ,EAAEa,KAEd4G,GAAIN,IAAK,WAAW,MAAOoC,GAAQ9B,KAAOL,IAAK,SAASvG,GACpD0I,EAAQ9B,EAAE5G,KAEd+U,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQqF,EAAkB,QAAU,YAIlD1d,EAAGG,MAAMkW,eAAevT,EAAOuO,GAC/BrR,EAAGG,MAAMqP,YAAY1M,GAEdA,GCpSX9C,EAAGI,OAAOgoB,mBAAqB,WAC3B,YAkCA,SAAStlB,GAAMsB,GAsGX,MArGAqO,GAAYW,QAEZhP,EAAUC,KAAK,SAASC,GACtB4C,EAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAOlE,IALA2B,EACSS,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,KAGnBtB,GAASA,EAAK8M,OAAU9M,EAAK+jB,OAE9B,MADAroB,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEP
oE,GAAUK,UAAS,cAAec,SAEtCnB,EAAUK,UAAS,KAAMc,QAGzB,IAAIigB,GAAe,GAAIC,IACvBjkB,GAAK+jB,MAAMpe,QAAQ,SAASG,GAC1B,GAAIoe,GAAOzZ,OAAOyZ,KAAKpe,EACvBoe,GAAKve,QAAQ,SAASuC,GACpB8b,EAAaG,IAAIjc,MAIrB,IAAIkc,GAAQloB,GAAGmoB,OAAOD,QACfL,MAAM/jB,EAAK+jB,OACXjX,MAAM9M,EAAK8M,OACX1B,MAAM/J,EAAgBC,IACtBgjB,aAAaA,GACbC,SAASA,GACTC,aAAaC,GACbC,OAAOA,GACP5d,QAAQA,GACR6d,MAAMA,GACNC,MAAMA,GACNC,QAEHC,EAAOliB,EAAUK,UAAS,SACvBjD,KAAKA,EAAK8M,OACV3J,QAAQC,OAAM,QACdC,KAAI,QAAU,iBACd8C,MAAK,eAAiB,SAAStC,GAAK,MAAOuB,MAAK2f,KAAKlhB,EAAE6D,SAE1D5B,EAAOlD,EAAUK,UAAS,SACvBjD,KAAKA,EAAK+jB,OACV5gB,QACAC,OAAM,KACNC,KAAI,QAAU,iBACdvG,KAAKsnB,EAAMpF,KAElBlZ,GACG1C,OAAM,UACNC,KAAI,IAAM2hB,GACV7e,MAAK,OAAS,SAAStC,GAAK,MAAOmE,GAAMnE,KACzCxG,GAAE,YAAc,SAASoc,GACxB7W,EAAUC,OAAM,cAAiB4W,EAAI0G,YAAc,cAAgB1G,EAAIqE,YAClEza,KAAI,KAAOoW,EAAIwL,IACpBriB,EAAUC,OAAM,cAAiB4W,EAAI0G,YAAc,cAAgB1G,EAAIqE,YAClEza,KAAI,KAAOoW,EAAIyL,GAGpB,IAAIC,GAAYnd,EAAMyR,EACtBA,GAAI3R,UACJkc,EAAare,QAAQ,SAASyf,GAC5B3L,EAAI3R,OAAO3I,MACT6I,MAAOmd,EACPjd,IAAOkd,EACP1d,MAAO+R,EAAI2L,OAGfxpB,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAE1BvE,GAAE,WAAc,SAASwG,GACxBjI,EAAQgG,QAAO,KAGnBhG,EAAQwL,gBAAgB,SAASvD,GAAI,MAAO,SAG5CwhB,EAAWP,GACXQ,EAAWxf,GAEXse,EAAM/mB,GAAE,OAAS,WACbynB,EAAKzhB,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAEuL,OAAO5L,IACzCH,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAEuL,OAAOnE,IACzC5H,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAEjD,OAAO4C,IACzCH,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAEjD,OAAOqK,IAE9CnF,EAAKzC,KAAI,YAAc,SAASQ,GAC9B,MAAO,aAAeA,EAAEL,EAAI,KAAOK,EAAEoH,EAAI,UAK1CzM,EAnIX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,GACTH,EAAY,KACZxG,EAAWF,GAAGE,SAAQ,aACtB4L,EAAQtM,EAAGG,MAAMsQ,UAAQ,SACzBvQ,EAAeF,EAAGI,OAAOF,UACzB6W,EAAS,KAET6R,EAAe,GACfC,EAAW,GACXE,EAAW,GACXC,EAAS,KACT5d,EAAU,GACV6d,EAAQ,GACRC,EAAQ,GACRI,EAAS,EAEVM,EAAa,SAASvB,KACtBsB,EAAa,SAASvY,KAQvBqB,EAAczS,EAAGG,MAAMsS,YAAY/R,EA0JvC,OA3CAoC,GAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAY6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACnEtB,QAAY4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAGrEigB,cAAc3Z,IAAK,WAAW,MAAO2Z,IAAgB1Z,IAAK,SAASvG,GAAGigB,EAAajgB,IACnFkgB,UAAc5Z,IAAK,WAAW,MAAO4Z,IAAY3Z,IAAK,SAASvG,GAAGkgB,EAASlgB,IAC3EogB,UAAc9Z,IAAK,WAAW,MAAO8Z,IAAY7Z,IAAK,SAASvG,GAAGogB,EAASpgB,IAC3EqgB,QAAc/Z,IAAK,WAAW,MAAO+Z,IAAU9Z,IAAK,SAASvG,GAAGqgB,EAAOrgB,IACvEyC,SAAc6D,IAAK,WAAW,MAAO7D,IAAW8D,IAAK,SAASvG,GAAGyC,EAAQzC,IACzEsgB,OAAcha,IAAK,WAAW,MAAOga,IAAS/Z,IAAK,SAASvG,GAAGsgB,EAAMtgB,IACrEugB,OAAcja,IAAK,WAAW,MAAOia,IAASha,IAAK,SAASvG,GAAGugB,EAAMvgB,IACrE2gB,QAAcra,IAAK,WAAW,MAAOqa,IAAUpa,IAAK,SAASvG,GAAG2gB,EAAO3gB,IAGvEb,GAAImH,IAAK,WAAW,MAAOgL,OAAQ/K,IAAK,SAASvG,GAAGsR,KAAKzZ,GAAG4V,QAAQzN,KACpE4G,GAAIN,IAAK,WAAW,MAAO4S,OAAQ3S,IAAK,SAASvG,GAAGkZ,KAAKrhB,GAAG4V,QAAQzN,KAGpEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9BoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrEihB,YAAa3a,IAAK,WAAW,MAAO2a,IAAc1a,IAAK,SAASvG,GAC5DihB,EAAajhB,IAEjBghB,YAAa1a,IAAK,WAAW,MAAO0a,IAAcza,IAAK,SAASvG,GAC5DghB,EAAahhB,MAIrB7F,EAAMpC,SAAWA,EACjBoC,EAAM5C,QAAUA,EAChBF,EAAGG,MAAMqP,YAAY1M,GACdA,GC3LX9C,EAAGI,OAAOypB,cAAgB,WACtB,YAuBA,SAAS/mB,GAAMsB,GA2QX,QAAS0lB,GAAa3hB,EAAEnF,GACpB,MAAW,WAAR+mB,EAA0B,OAC1BC,EACQ7hB,EA
AE8hB,WAAa3d,EAAMnE,EAAEnF,GAAK,OAC3BgnB,EAAL,OACM7hB,EAAE0V,SAAWvR,EAAMnE,EAAEnF,GAAK,OAI3C,QAASknB,GAAW/hB,EAAEnF,GAClB,MAAGgnB,IAAoB,WAARD,EACJ5hB,EAAE8hB,WAAa,OAAS3d,EAAMnE,EAAEnF,GAE9BmF,EAAE0V,SAAW,OAASvR,EAAMnE,EAAEnF,GAI/C,MA3RAoB,GAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9C5P,EAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAII,GAAOJ,EAAUK,UAAS,eAAgBjD,MAAMA,IAEhD6T,GADS7Q,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,kBAAkBD,OAAM,KACpEJ,EAAKH,OAAM,KAEnBG,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAEvE,IAUI0kB,GAVA/d,EAAS+L,EAAE5Q,UAAS,cACnBjD,KAAK,SAAS6D,GACX,MAAW,WAAR4hB,EAA0B5hB,EAEtBA,EAAEsE,OAAO,SAAS8F,GACrB,MAAOyX,IAAW,GAAQzX,EAAE0X,eAGpCG,EAAche,EAAO3E,QAAQC,OAAM,KAAMC,KAAI,QAAU,YAI3D,IAAW,WAARoiB,EACCK,EAAY1iB,OAAM,UACb+C,MAAK,eAAiB,GACtB9C,KAAI,QAAO,oBACXA,KAAI,IAAM,GAEfwiB,EAAc/d,EAAOjF,OAAM,cACxB,IAAY,WAAR4iB,EAAmB,CAC1BK,EAAY1iB,OAAM,QACb+C,MAAK,eAAiB,GACtB9C,KAAI,QAAO,oBACXA,KAAI,KAAO,GACXA,KAAI,KAAO,GAEhBwiB,EAAc/d,EAAOjF,OAAM,QAE3BijB,EAAY1iB,OAAM,KACbC,KAAI,QAAU,gBACd0iB,SAAQ,YAAW,0KACnB1iB,KAAI,YAAc,8BAEvB,IAAI2iB,GAAiBle,EAAOjF,OAAM,gBAElCmjB,GAAejmB,KAAK,SAAS8D,EAAEnF,GAC3BxC,GAAG2G,OAAOpG,MAAMwG,UAAS,QACpBI,KAAI,SAAWmiB,EAAa3hB,EAAEnF,MAI3ConB,EAAY1iB,OAAM,QACbC,KAAI,cAAgB,SACpBA,KAAI,QAAO,kBACXA,KAAI,KAAO,SACXA,KAAI,KAAO,IAEhB,IAAI4iB,GAAane,EAAOjF,OAAM,sBAE9BiF,GACKzK,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAAS8pB,gBAAgBriB,EAAEnF,KAE9BrB,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAAS+pB,eAAetiB,EAAEnF,KAE7BrB,GAAE,QAAU,SAASwG,EAAEnF,GACpBtC,EAASgqB,YAAYviB,EAAEnF,EAEvB,IAAIsB,GAAO8H,EAAO9H,MAClB,IAAIqhB,EAAa,CACb,GAAQ,WAALoE,EACKY,GAGArmB,EAAK2F,QAAQ,SAASmC,GAAUA,EAAOyR,UAAW,IAClD1V,EAAE0V,UAAW,IAGb1V,EAAE0V,UAAY1V,EAAE0V,SACZvZ,EAAKgP,MAAM,SAASlH,GAAU,MAAOA,GAAOyR,YAG5CvZ,EAAK2F,QAAQ,SAASmC,GAAUA,EAAOyR,UAAW,SAGvD,IAAW,WAARkM,EACN,GAAGC,EACC7hB,EAAE8hB,YAAc9hB,EAAE8hB,WAClB9hB,EAAEyiB,aAAiC/mB,QAAlBsE,EAAEyiB,eAA8BziB,EAAE0V,SAAW1V,EAAEyiB,aAChEziB,EAAE0V,SAAW1V,EAAE8hB,YAAc9hB,EAAEyiB,iBAC5B,KAAKZ,EAAU,CAClB7hB,EAAE0V,UAAY1V,EAAE0V,SAChB1V,EAAEyiB,aAAeziB,EAAE0V,QACnB,IAAIgN,GAAUvmB,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE8hB,YAC9CY,GAAQvX,MAAM,SAASlH,GAAU,MAAOA,GAAOwe,gBAG/CtmB,EAAK2F,QAAQ,SAASmC,GAClBA,EAAOyR,SAAWzR,EAAOwe,cAAe,IAKxDlqB,EAASoiB,aACLjF,SAAUvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,WAC5CoM,WAAY3lB,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE8hB,kBAKzDtoB,GAAE,WAAa,SAASwG,EAAEnF,GACvB,IAAW,WAAR+mB,IAAqBC,KACxBtpB,EAASoqB,eAAe3iB,EAAEnF,GACtB2iB,GAAa,CAEb,GAAIrhB,GAAO8H,EAAO9H,MAGlBA,GAAK2F,QAAQ,SAASmC,GAClBA,EAAOyR,UAAW,EACP,WAARkM,IAAmB3d,EAAOwe,aAAexe,EAAOyR,YAEvD1V,EAAE0V,UAAW,EACF,WAARkM,IAAmB5hB,EAAEyiB,aAAeziB,EAAE0V,UACzCnd,EAASoiB,aACLjF,SAAUvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,gBAK5DzR,EAAO1B,QAAO,cAAgB,SAASvC,GAAK,MAAOA,GAAEyiB,eACrDxe,EAAOhE,OAAOC,SAEdkiB,EACK5iB,KAAI,OAASmiB,GACb5X,KAAK,SAAU/J,GAAK,MAAOwD,GAAaqF,EAAO7I,KAKpD,IAAI4iB,EACJ,QAAOhB,GACH,IAAK,UACDgB,EAAc,EACd,MACJ,KAAK,UACDA,EAAc,GAGtB,GAAIC,EAAO,CAEP,GAAIC,KACJ7e,GAAO/H,KAAK,SAAS8D,EAAEnF,GACnB,GAAIkoB,EACJ,IAAIvf,EAAaqF,EAAO7I,KAAOwD,EAAaqF,EAAO7I,IAAI/F,OAAS+oB,EAAc,CAC1E,GAAIC,GAAazf,EAAaqF,EAAO7I,IAAIkjB,UAAU,EAAGF,EACtDD,GAAa1qB,GAAG2G,OAAOpG,MAAMoG,OAAM,QAAS+K,KAAKkZ,EAAa,OAC9D5qB,GAAG2G,OAAOpG,MAAM2G,OAAM,aAAcwK,KAAKvG,EAAaqF,EAAO7I,SAE7D+iB,GAAa1qB,GAAG2G,OAAOpG,MAAMoG,OAAM,OAEvC,IAAImkB,EACJ,KAGI,GAFAA,EAAiBJ,EAAW9gB,OAAOyN,wBAEd,GAAlByT,EAAqB,KAAMC,SAElC,MAAM3pB,GACF0pB,EAAiBtrB,EAAGG,MAAM6R,oBAAoBkZ,GAGlDD,EAAaxnB,KAAK6nB,EAAiBE,IAOvC,KAJA,GAAIC,GAAe,EACfC,EAAc,EACdC,KAEkBhmB,EAAd+lB,GAAgCD,EAAeR,EAAa7oB,QAChEupB,EAAaF,GAAgBR,EAAaQ,GAC1CC,GAAeT,EAAaQ,IAIhC,KAF
qB,IAAjBA,IAAoBA,EAAe,GAE/BC,EAAc/lB,GAAkB8lB,EAAe,GAAI,CACvDE,KACAF,GAEA,KAAK,GAAIG,GAAI,EAAGA,EAAIX,EAAa7oB,OAAQwpB,IACjCX,EAAaW,IAAMD,EAAaC,EAAIH,IAAiB,KACrDE,EAAaC,EAAIH,GAAgBR,EAAaW,GAGtDF,GAAcC,EAAaE,OAAO,SAASC,EAAMC,EAAK3iB,EAAO4iB,GACzD,MAAOF,GAAOC,IAKtB,IAAK,GADDE,MACKjpB,EAAI,EAAGkpB,EAAO,EAAOT,EAAJzoB,EAAkBA,IACxCipB,EAAWjpB,GAAKkpB,EAChBA,GAAQP,EAAa3oB,EAGzBoJ,GACKzE,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,MAAO,aAAeipB,EAAWjpB,EAAIyoB,GAAgB,KAAO,EAAI/hB,KAAKwB,MAAMlI,EAAIyoB,GAAgBV,GAAe,MAIlH5G,EACAhM,EAAExQ,KAAI,YAAc,cAAgBP,EAAQ7B,EAAOuR,MAAQ4U,GAAe,IAAMnmB,EAAOE,IAAM,KAG7F0S,EAAExQ,KAAI,YAAc,eAAsBpC,EAAOE,IAAM,KAG3D4B,EAAS9B,EAAOE,IAAMF,EAAOsR,OAAUnN,KAAKyiB,KAAKlB,EAAa7oB,OAASqpB,GAAgBV,MAEpF,CAEH,GAGIqB,GAHAC,EAAO,EACPC,EAAU,EACVC,EAAW,CAEfngB,GACKzE,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,GAAIZ,GAAS5B,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASiD,OAAOyN,wBAA0B2T,CAW7E,OAVAY,GAAOE,EAEHllB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAAQsV,EAAOhqB,IAC5CkqB,EAAUF,EAAO,EACjBC,GAAQtB,GAGZuB,GAAWlqB,EACPkqB,EAAUC,IAAUA,EAAWD,GAE5B,aAAeF,EAAO,IAAMC,EAAO,MAIlDlU,EAAExQ,KAAI,YAAc,cAAgBP,EAAQ7B,EAAOuR,MAAQyV,GAAY,IAAMhnB,EAAOE,IAAM,KAE1F4B,EAAS9B,EAAOE,IAAMF,EAAOsR,OAASwV,EAAO,GAGtC,WAARtC,GAECI,EACKxiB,KAAI,QAAU,SAASQ,EAAEnF,GACtB,MAAOunB,GAAW,GAAGvnB,GAAG6U,wBAA0B,KAErDlQ,KAAI,SAAW,IACfA,KAAI,IAAM,IACVA,KAAI,IAAM,KAGnBwiB,EACK1f,MAAK,OAASyf,GACdzf,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,OAoB7DF,EA7SX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,GACT2J,EAAS,SAAS7I,GAAK,MAAOA,GAAEqE,KAChCb,EAAe,SAAUxD,GAAK,MAAOA,IACrCmE,EAAQtM,EAAGG,MAAMsQ,WACjB0a,EAAe,GACfH,GAAQ,EACRQ,EAAU,GACVrH,GAAa,EACbwB,GAAc,EACdgF,GAAkB,EAClBX,GAAW,EACXtpB,EAAWF,GAAGE,SAAQ,cAAgB,iBAAkB,kBAAmB,iBAAkB,eAC7FqpB,EAAO,SAsUb,OAhCAjnB,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAiB6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACxEtB,QAAiB4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC1E6D,KAAiByC,IAAK,WAAW,MAAO+B,IAAU9B,IAAK,SAASvG,GAAGqI,EAAOrI,IAC1EgD,cAAiBsD,IAAK,WAAW,MAAOtD,IAAgBuD,IAAK,SAASvG,GAAGgD,EAAahD,IACtFqiB,OAAiB/b,IAAK,WAAW,MAAO+b,IAAS9b,IAAK,SAASvG,GAAGqiB,EAAMriB,IACxEwb,YAAiBlV,IAAK,WAAW,MAAOkV,IAAcjV,IAAK,SAASvG,GAAGwb,EAAWxb,IAClFwiB,cAAiBlc,IAAK,WAAW,MAAOkc,IAAgBjc,IAAK,SAASvG,GAAGwiB,EAAaxiB,IACtF6iB,SAAiBvc,IAAK,WAAW,MAAOuc,IAAWtc,IAAK,SAASvG,GAAG6iB,EAAQ7iB,IAC5Egd,aAAiB1W,IAAK,WAAW,MAAO0W,IAAezW,IAAK,SAASvG,GAAGgd,EAAYhd,IACpFgiB,iBAAiB1b,IAAK,WAAW,MAAO0b,IAAmBzb,IAAK,SAASvG,GAAGgiB,EAAgBhiB,IAC5FqhB,UAAiB/a,IAAK,WAAW,MAAO+a,IAAY9a,IAAK,SAASvG,GAAGqhB,EAASrhB,IAC9EohB,MAAiB9a,IAAK,WAAW,MAAO8a,IAAQ7a,IAAK,SAASvG,GAAGohB,EAAKphB,IAGtEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GAEdA,GC1VX9C,EAAGI,OAAOosB,cAAgB,WACtB,YA8BA,SAAS1pB,GAAMsB,GA8IX,MA7IAA,GAAUC,KAAK,SAASC,GACpBmO,EAAYW,QAEZlM,EAAY1G,GAAG2G,OAAOpG,KACtB,IAAI4E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAElEvF,GAAGG,MAAMsW,QAAQvP,GAGjBY,EAAEnB,OAAOqT,GAAWxZ,GAAGkf,OAAOpb,EAAK,GAAGuE,OAAO4L,IAAIwF,GAAMvY,OAAOke,KAE1DwB,EACAtZ,EAAErB,MAAMyT,IAA4B,GAAjBvU,EAAsBrB,EAAK,GAAGuE,OAAOzG,OAAQuD,GAAkBrB,EAAK,GAAGuE,OAAOzG,OAAS,IAAOkC,EAAK,GAAGuE,OAAOzG,SAEhI0F,
EAAErB,MAAMyT,IAAW,EAAGvU,IAE1B4J,EAAE5I,OAAOyT,GAAW5Z,GAAGkf,OAAOpb,EAAK,GAAGuE,OAAO4L,IAAIoN,GAAMngB,OAAO4f,KACzD7a,MAAM0U,IAAWvV,EAAiB,IAGnCkC,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,KAC7BmB,EAAEnB,SAAS,GACPmB,EAAEnB,QAAQmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,GAAWmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,KACzEmB,EAAEnB,QAAM,GAAK,KAEnB4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,KAC7B4I,EAAE5I,SAAS,GACP4I,EAAE5I,QAAQ4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,GAAW4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,KACzE4I,EAAE5I,QAAM,GAAK,IAGvB,IAAIW,GAAOJ,EAAUK,UAAS,8BAAiCgD,GAAIjG,MAAMA,EAAK,GAAGuE,SAC7ErB,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,iCAAmC4C,GACtFkT,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,WACjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEyB,EACKvF,GAAE,QAAU,SAASwG,EAAEnF,GACpBtC,EAAS8gB,YACLld,KAAM6D,EACNiB,MAAOpG,EACPiK,IAAKzM,GAAGuE,MACRwF,GAAIA,MAIhBkT,EAAU/V,OAAM,YACXC,KAAI,KAAO,sBAAwB4C,GACnC7C,OAAM,QAEXJ,EAAKH,OAAM,uBAA0BoD,EAAK,SACrC5C,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpBuS,EAAExQ,KAAI,YAAc8Z,EAAW,2BAA6BlX,EAAK,IAAM,GAEvE,IAAI2b,GAAO5e,EAAKH,OAAM,YAAaI,UAAS,WACvCjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,EAAEnF,GAAI,MAAOiX,GAAK9R,EAAEnF,IACjEkjB,GAAK9d,OAAOC,SAEZ6d,EAAKze,QAAQC,OAAM,QACdC,KAAI,IAAM,GACVA,KAAI,IAAM,SAASQ,EAAEnF,GAAM,MAAOhD,GAAGG,MAAM8H,UAAUsH,EAAE7F,KAAKL,IAAI,EAAGwY,EAAK1Z,EAAEnF,QAC1E2E,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUyB,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,OACnF5H,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,cAAgB8E,EAAEmS,EAAK9R,EAAEnF,IAAM2C,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAS,KAAO,QACxHT,GAAE,YAAc,SAASwG,EAAEnF,GACnBkf,IACL1hB,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL1X,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,aAInC9I,GAAE,WAAa,SAASwG,EAAEnF,GAClBkf,IACL1hB,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACL1B,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,aAGnC9I,GAAE,YAAc,SAASwG,EAAEnF,GACnBkf,GACLxhB,EAASmG,kBACLvC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,QAAU,SAASwG,EAAEnF,GACpB,GAAKkf,EAAL,CACA,GAAIkE,GAAUrlB,IACdL,GAASqG,cACLzC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5B1F,MAAOvE,GAAGuE,MACVqhB,QAASA,IAEb5lB,GAAGuE,MAAMshB,qBAEZ1kB,GAAE,WAAa,SAASwG,EAAEnF,GAClBkf,IACLxhB,EAAS4lB,iBACLhiB,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,UAEhCjK,GAAGuE,MAAMshB,qBAGjBH,EACKve,KAAI,OAAS,SAASQ,EAAEnF,GAAK,MAAOsJ,GAAMnE,EAAGnF,KAC7C2E,KAAI,QAAU,SAASQ,EAAEnF,EAAEwY,GAAK,OAAQqG,EAAK1Z,EAAEnF,GAAK,EAAI,kBAAoB,mBAAqB,WAAawY,EAAI,IAAMxY,IACxHwP,gBAAgBC,EAAa,QAC7B9K,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,cAAgB8E,EAAEmS,EAAK9R,EAAEnF,IAAM2C,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAS,KAAO,QAExHuF,KAAI,QAAWhC,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAU,IAE9D8jB,EAAK1T,gBAAgBC,EAAa,QAC7B9K,KAAI,IAAM,SAASQ,EAAEnF,GAClB,GAAIypB,GAAO5K,EAAK1Z,EAAEnF,GAAK,EACnBuM,EAAE,GACEA,EAAE,GAAKA,EAAEsS,EAAK1Z,EAAEnF,IAAM,EAC1BuM,EAAE,GAAK,EACPA,EAAEsS,EAAK1Z,EAAEnF,GACb,OAAOhD,GAAGG,MAAM8H,UAAUwkB,KAE7B9kB,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUyB,KAAKL,IAAIK,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAI,QAIzGkD,EAAYS,UAAS,2BACdpQ,EAtKX,GAcMkX,GACAI,EACAF,EACAiB,EAjBF5V,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,KACRC,EAAS,KACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZY,EAAItH,GAAG8H,MAAMC,SACbgH,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BqQ,KACA0B,GAAU,GACVF,GAAU,EACVK,GAAW,EACXnV,EAAQtM,EAAGG,MAAMuQ,eAKjBhQ,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,
kBAAmB,mBAAoB,kBAAmB,mBAAoB,aACnIwhB,GAAc,EAGhBzP,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU,EA0MjD,OAtDAoC,GAAMqf,eAAiB,SAASC,EAAYC,GACxCnb,EACKC,OAAM,sBAAyBib,GAC/B1X,QAAO,QAAU2X,IAI1Bvf,EAAMwf,gBAAkB,WACpBpb,EACKC,OAAM,0BACNuD,QAAO,SAAU,IAQ1B5H,EAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAU6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACjEtB,QAAU4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACnEiX,QAAU3Q,IAAK,WAAW,MAAO2Q,IAAU1Q,IAAK,SAASvG,GAAGiX,EAAOjX,IACnE2Y,QAAUrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACnEyY,SAAUnS,IAAK,WAAW,MAAOmS,IAAWlS,IAAK,SAASvG,GAAGyY,EAAQzY,IACrEb,GAAUmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAC/D4G,GAAUN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC/DvC,QAAU6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IACzDuS,QAAUjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IACzDqR,SAAU/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACrEyR,SAAUnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACrEuR,QAAUjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACnEwS,QAAUlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACnE8Y,UAAcxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IAC3E4B,IAAc0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC/DuZ,aAAcjT,IAAK,WAAW,MAAOiT,IAAehT,IAAK,SAASvG,GAAGuZ,EAAYvZ,IAGjFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GAEdA,GCvOX9C,EAAGI,OAAOssB,mBAAqB,SAASC,GACpC,YAqDA,SAAS7pB,GAAMsB,GAuNX,MAtNAA,GAAUC,KAAK,SAASC,GACpBmO,EAAYW,QACZX,EAAYrS,OAAO8lB,GACfhJ,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,EAElC,IAAInW,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAQlE,IANAzC,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa1F,SAASmkB,GAAoBxrB,KAAK0B,IACrFA,EAAMoE,UAAYnG,KAGlBgR,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAItCP,EAAIoe,EAAK9f,SACTmJ,EAAI2W,EAAKhL,QAGT,IAAI5T,GAAOJ,EAAUK,UAAS,mCAAoCjD,MAAMA,IACpEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,sCAAsCD,OAAM,KAC5FyQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBAG5Bkc,GAGDC,EAAO1c,MAAMzB,GAEbwS,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE+B,EAAKH,OAAM,kBACNQ,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,MAdtD0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAgB9Cf,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEnEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAIvD0e,IACAC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAKD,EAAOC,KAAMC,IAAIF,EAAOE,MACrCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAExC4B,EACK9e,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAA
EnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAE9C,IAAIF,GAAWxF,EAAEhR,OAAM,gBAClByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAC/CF,GAASxP,aAAa/M,KAAK8kB,GAGvBhJ,IACAC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KACvD0R,EAAEhR,OAAM,iBACHgH,aACA/M,KAAK+b,IAGVC,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBACHgH,aACA/M,KAAKic,IAOdiH,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtDskB,EAAK5D,iBAEL,IAAI2C,GAAa7C,EAAY8C,EAAgBC,IAC7C7gB,GACKmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAElB5T,QAAQ,SAASmC,EAAOpJ,GACrBof,EAAapiB,EAAG4I,kBAAkBwD,EAAOvD,OAAQjH,EAAE0E,YAAaxD,EAAMgF,KACtEoe,EAAK/D,eAAeC,GAAW,EAC/B,IAAI9S,GAAQlD,EAAOvD,OAAOuZ,EACZve,UAAVyL,IACgBzL,SAAhBohB,IAA2BA,EAAc3V,GACtBzL,SAAnBqhB,IAA8BA,EAAiBpiB,EAAMsD,SAAStD,EAAMgF,IAAIwH,EAAM8S,KAClF+C,EAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOlJ,EAAMyM,IAAID,EAAO8S,GACxB9V,MAAOA,EAAMF,EAAOA,EAAOqY,aAC3BngB,KAAM8H,EAAOvD,OAAOuZ,OAIhC,IAAIkD,GAASnI,EAAM3E,aAAa1V,EAAMgF,IAAImd,EAAY7C,GACtDkC,GAAiBpkB,QACZuL,eAAe,SAAStD,EAAEnF,GACvB,MAAOqa,GAAM7E,aAAarQ,KAE7B7D,MACG0H,MAAOsZ,EACPlc,MAAOgZ,EACPhW,OAAQ+Y,MAGhBb,EAAiBre,gBAAgBif,KAIrCZ,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpDlB,EAASmsB,cACT3G,EAAK5D,oBAGTwB,EAAOpjB,SAASiB,GAAE,cAAgB,SAASwG,EAAEnF,GACzCmF,EAAE0V,UAAY1V,EAAE0V,SAEXvZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAAYzb,QACjDkC,EAAKmQ,IAAI,SAAStM,GAGd,MAFAA,GAAE0V,UAAW,EACbvW,EAAKC,UAAS,cAAemD,QAAO,YAAa,GAC1CvC,IAIf4J,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,WACnDnd,EAASoiB,YAAY/Q,GAErB3N,EAAU+J,aAAa/M,KAAK0B,KAGhCghB,EAAOpjB,SAASiB,GAAE,iBAAmB,SAASwG,GAE1C7D,EAAK2F,QAAQ,SAAS9B,GAClBA,EAAE0V,UAAW,IAEjB1V,EAAE0V,UAAW,EAEb9L,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,WACnDnd,EAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGVzT,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAGjC+O,EAAM8L,SAAWjc,EAAEic,UAGvB/a,EAAMqR,aAId1B,EAAYS,UAAS,gCACdpQ,EAtQX,GAmBMgF,GACAyH,EApBF2W,EAAOyG,GAAa3sB,EAAGI,OAAOosB,gBAC5BrP,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClB4L,EAAS9jB,EAAGI,OAAO0jB,SACnBQ,EAAmBtkB,EAAGkE,uBACtBhE,EAAUF,EAAGI,OAAOF,UAItBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZzX,EAAQtM,EAAGG,MAAMuQ,eACjBtJ,EAAQ,KACRC,EAAS,KACTwc,GAAa,EACb3G,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClB2G,GAA0B,EAG1BtS,KACAoR,EAAe,KACfpM,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,cAAe,aACpEksB,EAAqB,GAG3BzP,GAAM9E,OAAM,UAAWW,YAAY,GACnCqE,EAAMhF,OAAQ,EAAoB,QAAU,QAC5CnY,EACKuI,SAAS,GACT+C,eAAc,GACdC,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAEhC0I,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,IAQrC,IAAIyP,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU,EA8SjD,OA9EAwlB,GAAKxlB,SAASiB,GAAE,2BAA6B,SAASoc,GAClDA,EAAW,QACPvR,IAAK1J,EAAMgF,IAAIiW,EAAIzZ,MACnB0H,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,OAEfpM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7BggB,EAAKxlB,SAASiB,GAAE,0BAA4B,SAASoc,GACjD7d,EAAQgG,QAAO,KAGnBggB,EAAKxlB,SAASiB,GAAE,2BAA6B,SAASoc,GAClD7d,MAQJ4C,EAAMpC,SAAWA,EACjBoC,EAAMojB,KAAOA,EACbpjB,EAAMghB,OAASA,EACfhhB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAMwhB,iBAAmBA,EACzBxhB,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9E
uU,WAAYjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAC3EyU,WAAYnO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC3Ewa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAGrEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,GACb4Z,EAAK5Z,MAAMA,KAEf7D,UAAcwG,IAAK,WAAW,MAAO2d,IAAsB1d,IAAK,SAASvG,GACrEikB,EAAmBjkB,EACnB8J,EAAYW,MAAMwZ,GAClBvP,EAAM5U,SAASmkB,GACfzP,EAAM1U,SAASmkB,KAEnBlP,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQ,EAAM,QAAU,UAElCgM,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,EAA0B1b,EACtBA,KAAM,GACN7F,EAAMof,aAAY,OAK9BliB,EAAGG,MAAMkW,eAAevT,EAAOojB,GAC/BlmB,EAAGG,MAAMqP,YAAY1M,GAEdA,GAKX9C,EAAGI,OAAO0sB,aAAe,WACrB,GAAIhqB,GAAQ9C,EAAGI,OAAOssB,mBAAmB1sB,EAAGI,OAAO2sB,UAkBnD,OAfAjqB,GAAMuhB,yBAAwB,GAC9BvhB,EAAMwhB,iBAAiBpkB,QAAQ6K,iBAAiB,SAASzG,GAErD,GAAI6D,GAAI7D,EAAK8H,OAAO,GAAG9H,KAEnBgI,EAAQnE,EAAE2Z,KAAO3Z,EAAE4Z,MAAQ,SAAW,QAC1C,OAAO,sBACqBzV,EAAQ,KAAOhI,EAAK0H,MAAQ,qCAEzBlJ,EAAMua,MAAM7E,aAAarQ,EAAE2Z,MAAQ,oCAClChf,EAAMua,MAAM7E,aAAarQ,EAAE4Z,OAAS,kCACtCjf,EAAMua,MAAM7E,aAAarQ,EAAE6Z,MAAQ,kCACnClf,EAAMua,MAAM7E,aAAarQ,EAAE8Z,KAAO,uBAG7Dnf,GAIX9C,EAAGI,OAAO4sB,oBAAsB,WAC5B,GAAIlqB,GAAQ9C,EAAGI,OAAOssB,mBAAmB1sB,EAAGI,OAAO8gB,iBAkBnD,OAfApe,GAAMuhB,yBAAwB,GAC9BvhB,EAAMwhB,iBAAiBpkB,QAAQ6K,iBAAiB,SAASzG,GAErD,GAAI6D,GAAI7D,EAAK8H,OAAO,GAAG9H,KAEnBgI,EAAQnE,EAAE2Z,KAAO3Z,EAAE4Z,MAAQ,SAAW,QAC1C,OAAO,sBACqBzV,EAAQ,KAAOhI,EAAK0H,MAAQ,qCAEzBlJ,EAAMua,MAAM7E,aAAarQ,EAAE2Z,MAAQ,oCAClChf,EAAMua,MAAM7E,aAAarQ,EAAE4Z,OAAS,kCACtCjf,EAAMua,MAAM7E,aAAarQ,EAAE6Z,MAAQ,kCACnClf,EAAMua,MAAM7E,aAAarQ,EAAE8Z,KAAO;AnB5YxE,GmB+YWnf,GClZX9C,EAAGI,OAAO0jB,OAAS,WACf,YAuBA,SAAShhB,GAAMsB,GAoSX,QAAS0lB,GAAa3hB,EAAEnF,GACpB,MAAW,WAAR+mB,EAA0B,OAC1BC,EACQ7hB,EAAE8hB,WAAa,OAAS,OACvBD,EAAL,QACC7hB,EAAEmE,QAAOnE,EAAEmE,MAAQA,EAAMnE,EAAEnF,IACtBmF,EAAE0V,SAAW1V,EAAEmE,MAAQ,QAIxC,QAAS4d,GAAW/hB,EAAEnF,GAClB,MAAGgnB,IAAoB,WAARD,GACJ5hB,EAAE8hB,WAAa,OAEf9hB,EAAEmE,OAASA,EAAMnE,EAAEnF,GAKlC,QAASiqB,GAAa9kB,EAAEnF,GACpB,MAAGgnB,IAAoB,WAARD,EACJ,EAEE5hB,EAAE0V,SAAW,EAAI,EAIlC,MA9TAzZ,GAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9C5P,EAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAII,GAAOJ,EAAUK,UAAS,eAAgBjD,MAAMA,IAChDkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,kBAAkBD,OAAM,KACxEyQ,EAAI7Q,EAAKH,OAAM,IAEfgd,GACA7c,EAAKK,KAAI,YAAc,cAAkBpC,EAAOuR,MAAS,IAAMvR,EAAOE,IAAM,KAE5E6B,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAE3E,IAUI0kB,GAEAY,EAZA3e,EAAS+L,EAAE5Q,UAAS,cACnBjD,KAAK,SAAS6D,GACX,MAAW,WAAR4hB,EAA0B5hB,EAEtBA,EAAEsE,OAAO,SAAS8F,GACrB,MAAOyX,IAAW,GAAQzX,EAAE0X,eAIpCG,EAAche,EAAO3E,QAAQC,OAAM,KAAMC,KAAI,QAAU,YAI3D,QAAOoiB,GACH,IAAK,UACDgB,EAAc,EACd,MACJ,KAAK,UACDA,EAAc,GAGtB,GAAW,WAARhB,EACCK,EAAY1iB,OAAM,UACb+C,MAAK,eAAiB,GACtB9C,KAAI,QAAO,oBACXA,KAAI,IAAM,GAEfwiB,EAAc/d,EAAOjF,OAAM,yBACxB,IAAY,WAAR4iB,EAAmB,CAC1BK,EAAY1iB,OAAM,QACb+C,MAAK,eAAiB,GACtB9C,KAAI,QAAO,oBACXA,KAAI,KAAO,GACXA,KAAI,KAAO,GAChBwiB,EAAc/d,EAAOjF,OAAM,qBAE3BijB,EAAY1iB,OAAM,KACbC,KAAI,QAAU,gBACd0iB,SAAQ,YAAW,0KACnB1iB,KAAI,YAAc,8BAEvB,IAAI2iB,GAAiBle,EAAOjF,OAAM,gBAElCmjB,GAAejmB,KAAK,SAAS8D,EAAEnF,GAC3BxC,G
AAG2G,OAAOpG,MAAMwG,UAAS,QACpBI,KAAI,SAAWmiB,EAAa3hB,EAAEnF,MAI3ConB,EAAY1iB,OAAM,QACbC,KAAI,cAAgB,SACpBA,KAAI,QAAO,kBACXA,KAAI,KAAO,SACXA,KAAI,KAAO,IAEhB,IAAI4iB,GAAane,EAAOjF,OAAM,sBAE9BiF,GACKzK,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAAS8pB,gBAAgBriB,EAAEnF,KAE9BrB,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAAS+pB,eAAetiB,EAAEnF,KAE7BrB,GAAE,QAAU,SAASwG,EAAEnF,GACpBtC,EAASgqB,YAAYviB,EAAEnF,EAEvB,IAAIsB,GAAO8H,EAAO9H,MAClB,IAAIqhB,EAAa,CACb,GAAQ,WAALoE,EACKY,GAGArmB,EAAK2F,QAAQ,SAASmC,GAAUA,EAAOyR,UAAW,IAClD1V,EAAE0V,UAAW,IAGb1V,EAAE0V,UAAY1V,EAAE0V,SACZvZ,EAAKgP,MAAM,SAASlH,GAAU,MAAOA,GAAOyR,YAG5CvZ,EAAK2F,QAAQ,SAASmC,GAAUA,EAAOyR,UAAW,SAGvD,IAAW,WAARkM,EACN,GAAGC,EACC7hB,EAAE8hB,YAAc9hB,EAAE8hB,WAClB9hB,EAAEyiB,aAAiC/mB,QAAlBsE,EAAEyiB,eAA8BziB,EAAE0V,SAAW1V,EAAEyiB,aAChEziB,EAAE0V,SAAW1V,EAAE8hB,YAAc9hB,EAAEyiB,iBAC5B,KAAKZ,EAAU,CAClB7hB,EAAE0V,UAAY1V,EAAE0V,SAChB1V,EAAEyiB,aAAeziB,EAAE0V,QACnB,IAAIgN,GAAUvmB,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE8hB,YAC9CY,GAAQvX,MAAM,SAASlH,GAAU,MAAOA,GAAOwe,gBAG/CtmB,EAAK2F,QAAQ,SAASmC,GAClBA,EAAOyR,SAAWzR,EAAOwe,cAAe,IAKxDlqB,EAASoiB,aACLjF,SAAUvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,WAC5CoM,WAAY3lB,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE8hB,kBAKzDtoB,GAAE,WAAa,SAASwG,EAAEnF,GACvB,IAAW,WAAR+mB,IAAqBC,KACxBtpB,EAASoqB,eAAe3iB,EAAEnF,GACtB2iB,GAAa,CAEb,GAAIrhB,GAAO8H,EAAO9H,MAGlBA,GAAK2F,QAAQ,SAASmC,GAClBA,EAAOyR,UAAW,EACP,WAARkM,IAAmB3d,EAAOwe,aAAexe,EAAOyR,YAEvD1V,EAAE0V,UAAW,EACF,WAARkM,IAAmB5hB,EAAEyiB,aAAeziB,EAAE0V,UACzCnd,EAASoiB,aACLjF,SAAUvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,gBAK5DzR,EAAO1B,QAAO,cAAgB,SAASvC,GAAK,MAAOA,GAAEyiB,eACrDxe,EAAOhE,OAAOC,SAEdkiB,EACK5iB,KAAI,OAASmiB,GACb5X,KAAK,SAAU/J,GAAK,MAAOwD,GAAaqF,EAAO7I,KAIpD,IAAIujB,GAAc,CAClB,IAAIV,EAAO,CAEP,GAAIC,KACJ7e,GAAO/H,KAAK,SAAS8D,EAAEnF,GACnB,GAAIkoB,EACJ,IAAIvf,EAAaqF,EAAO7I,KAAOwD,EAAaqF,EAAO7I,IAAI/F,OAAS+oB,EAAc,CAC1E,GAAIC,GAAazf,EAAaqF,EAAO7I,IAAIkjB,UAAU,EAAGF,EACtDD,GAAa1qB,GAAG2G,OAAOpG,MAAMoG,OAAM,QAAS+K,KAAKkZ,EAAa,OAC9D5qB,GAAG2G,OAAOpG,MAAM2G,OAAM,aAAcwK,KAAKvG,EAAaqF,EAAO7I,SAE7D+iB,GAAa1qB,GAAG2G,OAAOpG,MAAMoG,OAAM,OAEvC,IAAImkB,EACJ,KAGI,GAFAA,EAAiBJ,EAAW9gB,OAAOyN,wBAEd,GAAlByT,EAAqB,KAAMC,SAElC,MAAM3pB,GACF0pB,EAAiBtrB,EAAGG,MAAM6R,oBAAoBkZ,GAGlDD,EAAaxnB,KAAK6nB,EAAiBE,IAGvC,IAAIC,GAAe,EACfE,IAGJ,KAFAD,EAAc,EAEQ/lB,EAAd+lB,GAAgCD,EAAeR,EAAa7oB,QAChEupB,EAAaF,GAAgBR,EAAaQ,GAC1CC,GAAeT,EAAaQ,IAIhC,KAFqB,IAAjBA,IAAoBA,EAAe,GAE/BC,EAAc/lB,GAAkB8lB,EAAe,GAAI,CACvDE,KACAF,GAEA,KAAK,GAAIG,GAAI,EAAGA,EAAIX,EAAa7oB,OAAQwpB,IACjCX,EAAaW,IAAMD,EAAaC,EAAIH,IAAiB,KACrDE,EAAaC,EAAIH,GAAgBR,EAAaW,GAGtDF,GAAcC,EAAaE,OAAO,SAASC,EAAMC,EAAK3iB,EAAO4iB,GACzD,MAAOF,GAAOC,IAKtB,IAAK,GADDE,MACKjpB,EAAI,EAAGkpB,EAAO,EAAOT,EAAJzoB,EAAkBA,IACxCipB,EAAWjpB,GAAKkpB,EAChBA,GAAQP,EAAa3oB,EAGzBoJ,GACKzE,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,MAAO,aAAeipB,EAAWjpB,EAAIyoB,GAAgB,KAAO,EAAI/hB,KAAKwB,MAAMlI,EAAIyoB,GAAgBV,GAAe,MAIlH5G,EACAhM,EAAExQ,KAAI,YAAc,cAAgBP,EAAQ7B,EAAOuR,MAAQ4U,GAAe,IAAMnmB,EAAOE,IAAM,KAG7F0S,EAAExQ,KAAI,YAAc,eAAsBpC,EAAOE,IAAM,KAG3D4B,EAAS9B,EAAOE,IAAMF,EAAOsR,OAAUnN,KAAKyiB,KAAKlB,EAAa7oB,OAASqpB,GAAgBV,MAEpF,CAEH,GAGIqB,GAHAC,EAAO,EACPC,EAAU,EACVC,EAAW,CAEfngB,GACKzE,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,GAAIZ,GAAS5B,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASiD,OAAOyN,wBAA0B2T,CAc7E,OAbAY,GAAOE,EAEHllB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAAQsV,EAAOhqB,IAC5CkqB,EAAUF,EAAO,EACjBC,GAAQtB,GAGZuB,GAAWlqB,EACPkqB,EAAUC,IAAUA,EAAWD,GAElBF,EAAOG,EAArBb,IACCA,EAAcU,EAAOG,GAElB,aAAeH,EAAO,IAAMC,EAAO,MAIlDlU,EAAExQ,KAAI,YAAc,cAAgBP,EAAQ7B,EAAOuR,MAAQyV,GAAY,IAAMhnB,EAAOE,IAAM,KAE1F4B,EAAS9B,EAAOE,IAAMF,EAAOsR,OAASwV,EAAO,GAGjD,GAAW,WAARtC,
EAAmB,CAElBI,EACKxiB,KAAI,QAAU,SAASQ,EAAEnF,GACtB,MAAOunB,GAAW,GAAGvnB,GAAG6U,wBAA0B,KAErDlQ,KAAI,SAAW,IACfA,KAAI,IAAM,IACVA,KAAI,IAAM,KAGf6V,EAAO0P,OAAM,OAAM,gBACdvlB,KAAI,QAAU,gBACdA,KAAI,OAAS,QAEbA,KAAI,UAAW,EAEpB,IAAIwlB,GAAWhV,EAAEhR,OAAM,gBAEvBgmB,GACChf,aAAa1F,SAAS,KAClBd,KAAI,KAAOojB,GACXpjB,KAAI,QAAU+jB,EAAcX,EAAc,IAC1CpjB,KAAI,SAAWN,EAAS,IACxBM,KAAI,KAAOpC,EAAOE,IAAM,IACxBkC,KAAI,UAAYqiB,EAAW,EAAI,GAKxCG,EACK1f,MAAK,OAASyf,GACdzf,MAAK,eAAiBwiB,GACtBxiB,MAAK,SAAWyf,KA8BlBpnB,EAhVX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,GACT2J,EAAS,SAAS7I,GAAK,MAAOA,GAAEqE,KAChCb,EAAe,SAAUxD,GAAK,MAAOA,IACrCmE,EAAQtM,EAAGG,MAAMsQ,WACjB0a,EAAe,GACfH,GAAQ,EACRQ,EAAU,GACVrH,GAAa,EACbwB,GAAc,EACdgF,GAAkB,EAClBX,GAAW,EACXtpB,EAAWF,GAAGE,SAAQ,cAAgB,iBAAkB,kBAAmB,iBAAkB,eAC7FqpB,EAAO,SAyWb,OAhCAjnB,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAiB6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACxEtB,QAAiB4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC1E6D,KAAiByC,IAAK,WAAW,MAAO+B,IAAU9B,IAAK,SAASvG,GAAGqI,EAAOrI,IAC1EgD,cAAiBsD,IAAK,WAAW,MAAOtD,IAAgBuD,IAAK,SAASvG,GAAGgD,EAAahD,IACtFqiB,OAAiB/b,IAAK,WAAW,MAAO+b,IAAS9b,IAAK,SAASvG,GAAGqiB,EAAMriB,IACxEwiB,cAAiBlc,IAAK,WAAW,MAAOkc,IAAgBjc,IAAK,SAASvG,GAAGwiB,EAAaxiB,IACtFwb,YAAiBlV,IAAK,WAAW,MAAOkV,IAAcjV,IAAK,SAASvG,GAAGwb,EAAWxb,IAClF6iB,SAAiBvc,IAAK,WAAW,MAAOuc,IAAWtc,IAAK,SAASvG,GAAG6iB,EAAQ7iB,IAC5Egd,aAAiB1W,IAAK,WAAW,MAAO0W,IAAezW,IAAK,SAASvG,GAAGgd,EAAYhd,IACpFgiB,iBAAiB1b,IAAK,WAAW,MAAO0b,IAAmBzb,IAAK,SAASvG,GAAGgiB,EAAgBhiB,IAC5FqhB,UAAiB/a,IAAK,WAAW,MAAO+a,IAAY9a,IAAK,SAASvG,GAAGqhB,EAASrhB,IAC9EohB,MAAiB9a,IAAK,WAAW,MAAO8a,IAAQ7a,IAAK,SAASvG,GAAGohB,EAAKphB,IAGtEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GAEdA,GC7XX9C,EAAGI,OAAO4H,KAAO,WACb,YA6CA,SAASlF,GAAMsB,GA2HX,MA1HAqO,GAAYW,QACZX,EAAYrS,OAAOgtB,GACnBhpB,EAAUC,KAAK,SAASC,GACpB4C,EAAY1G,GAAG2G,OAAOpG,KACtB,IAAI4E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAClEvF,GAAGG,MAAMsW,QAAQvP,GAGjBY,EAAIslB,EAAQhnB,SACZmJ,EAAI6d,EAAQlS,SAEZuF,EAAKA,GAAM3Y,EACXie,EAAKA,GAAMxW,CAGX,IAAIjI,GAAOJ,EAAUK,UAAS,qBAAsBjD,MAAMA,IACtDkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,wBACnD8V,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvE2nB,EACKhmB,MAAMzB,GACN0B,OAAOzB,EAEZ,IAAIynB,GAAc/lB,EAAKH,OAAM,kBAC7BkmB,GAAYjsB,KAAKgsB,GAEjB3P,EAAU/V,OAAM,YACXC,KAAI,KAAO,gBAAkBylB,EAAQ7iB,MACrC7C,OAAM,QAEXJ,EAAKH,OAAM,iBAAoBimB,EAAQ7iB,KAAO,SACzC5C,KAAI,QAAUhC,GACdgC,KAAI,SAAY/B,EAAkB,EAAKA,EAAkB,GAE9DuS,EAAKxQ,KAAI,YAAc8Z,EAAW,qBAAuB2L,EAAQ7iB,KAAO,IAAM,IAC9E8iB,EACK1lB,KAAI,YAAc8Z,EAAW,qBAAuB2L,EAAQ7iB,KAAO,IAAM,GAE9E,IAAI0b,GAAS3e,EAAKH,OAAM,cAAeI,UAAS,aAC3CjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,GAAK,MAAOA,GAAEqE,KAC3DyZ,GAAOxe,QAAQC,OAAM,KAChB+C,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,SAAStC,GAAK,MAAOA,GAAEmlB,aAAeA,IAC5D7iB,MAAK,eAAiB,MAE3Bwb,EAAO7d,OAAOC,SAEd4d,EACKte,KAAI,QAAU,SAASQ,EAAEnF,GACtB,OAAQmF,EAAEuC,SAAW,IAAM,uBAAyB1H,IAEvD0H,QAAO,QAAU,SAASvC,GAAK,MAA
OA,GAAEsT,QACxChR,MAAK,OAAS,SAAStC,EAAEnF,GAAI,MAAOsJ,GAAMnE,EAAGnF,KAC7CyH,MAAK,SAAW,SAAStC,EAAEnF,GAAI,MAAOsJ,GAAMnE,EAAGnF,KACpDijB,EAAOzT,gBAAgBC,EAAa,gBAC/BhI,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,SAAStC,GAAK,MAAOA,GAAEolB,aAAe,IAEjE,IAAIC,GAAYvH,EAAO1e,UAAS,gBAC3BjD,KAAK,SAAS6D,GAAK,MAAOslB,GAAOtlB,IAAMA,OAC5CqlB,GAAU/lB,QAAQC,OAAM,QACnBC,KAAI,QAAU,WACdA,KAAI,IAAM,SAASQ,GAChB,MAAO3H,IAAG0V,IAAIwX,OACTzF,YAAYA,GACZ0F,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUwY,EAAGxG,EAAK9R,EAAEnF,OACtD+iB,GAAG,SAAS5d,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAU8d,EAAGlE,EAAK1Z,EAAEnF,OACvD4qB,GAAG,SAASzlB,EAAEnF,GAAK,MAAO+iB,GAAIxW,EAAE5I,SAAS,IAAM,EAAI4I,EAAE5I,SAAS,IAAM,EAAI,EAAI4I,EAAE5I,SAAS,GAAK4I,EAAE5I,SAAS,MAEvGlF,MAAMV,MAAOoH,EAAEU,WAE5Bod,EAAO7d,OAAOb,UAAS,gBAClBc,SAELmlB,EAAUhb,gBAAgBC,EAAa,mBAClC9K,KAAI,IAAM,SAASQ,GAChB,MAAO3H,IAAG0V,IAAIwX,OACTzF,YAAYA,GACZ0F,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAEnF,OACrD+iB,GAAG,SAAS5d,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAEnF,OACtD4qB,GAAG,SAASzlB,EAAEnF,GAAK,MAAOuM,GAAGA,EAAE5I,SAAS,IAAM,EAAI4I,EAAE5I,SAAS,IAAM,EAAI,EAAI4I,EAAE5I,SAAS,GAAK4I,EAAE5I,SAAS,MAEtGlF,MAAMV,MAAOoH,EAAEU,UAG5B,IAAIglB,GAAY5H,EAAO1e,UAAS,gBAC3BjD,KAAK,SAAS6D,GAAK,OAAQA,EAAEU,SAElCglB,GAAUpmB,QAAQC,OAAM,QACnBC,KAAI,QAAU,WACdA,KAAI,IACDnH,GAAG0V,IAAIlO,OACNigB,YAAYA,GACZ0F,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUwY,EAAGxG,EAAK9R,EAAEnF,OACtDuM,EAAE,SAASpH,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAU8d,EAAGlE,EAAK1Z,EAAEnF,QAG/D6qB,EAAUrb,gBAAgBC,EAAa,mBAClC9K,KAAI,IACDnH,GAAG0V,IAAIlO,OACNigB,YAAYA,GACZ0F,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAEnF,OACrDuM,EAAE,SAASpH,EAAEnF,GAAK,MAAOhD,GAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAEnF,QAI9Dyd,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,SAEXrH,EAAYS,UAAS,kBACdpQ,EAnKX,GAcMgF,GACAyH,EAfD6d,EAAUptB,EAAGI,OAAOgtB,UAGrB7nB,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTH,EAAY,KACZomB,EAAc,IACdhhB,EAAQtM,EAAGG,MAAMuQ,eACjBuJ,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9Boe,EAAU,SAASxlB,EAAEnF,GAAK,OAAQmN,MAAM0R,EAAK1Z,EAAEnF,KAAqB,OAAd6e,EAAK1Z,EAAEnF,IAC7DyqB,EAAS,SAAStlB,GAAK,MAAOA,GAAEulB,MAChCjM,GAAW,EAGXwG,EAAc,SACdxf,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,eAAiB,mBAAoB,kBAAmB,YAGpF0sB,GACKU,UAAU,IACVC,aAAa,GAAG,KAUrB,IAAItN,GAAIsF,EACFtT,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA0LnD,OAjDA3F,GAAMpC,SAAWA,EACjBoC,EAAMsqB,QAAUA,EAEhBA,EAAQ1sB,SAASiB,GAAE,eAAiB,WAAYjB,EAASqG,aAAatF,MAAMV,KAAMM,aAClF+rB,EAAQ1sB,SAASiB,GAAE,mBAAqB,WAAYjB,EAASsb,iBAAiBva,MAAMV,KAAMM,aAC1F+rB,EAAQ1sB,SAASiB,GAAE,kBAAoB,WAAYjB,EAASsF,gBAAgBvE,MAAMV,KAAMM,aAExFyB,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEglB,SAAU1e,IAAK,WAAW,MAAO0e,IAAWze,IAAK,SAASvG,GAAGglB,EAAQhlB,IACrEsf,aAAmBhZ,IAAK,WAAW,MAAOgZ,IAAe/Y,IAAK,SAASvG,GAAGsf,EAAYtf,IACtF8Y,UAAcxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IAG3EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClB2kB,EAAQ3kB,SAASA,KAErBglB,QAASxe,IAAK,WAAW,MAAOwe,IAAUve,IAAK,SAASvG,GACpD8kB,EA
ASjtB,GAAG4V,QAAQzN,KAExBb,GAAImH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAC7CsR,EAAOtR,EACPykB,EAAQtlB,EAAEa,KAEd4G,GAAIN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAC7CkZ,EAAOlZ,EACPykB,EAAQ7d,EAAE5G,KAEd2D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1BykB,EAAQ9gB,MAAMA,OAItBtM,EAAGG,MAAMkW,eAAevT,EAAOsqB,GAC/BptB,EAAGG,MAAMqP,YAAY1M,GAEdA,GCnOX9C,EAAGI,OAAO4tB,UAAY,WAClB,YA8EA,SAASlrB,GAAMsB,GA0VX,MAzVAqO,GAAYW,QACZX,EAAYrS,OAAOoiB,GACftF,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GA6IpB,QAAS2pB,KACJ/Q,GACD/E,EAAEhR,OAAM,2BACLgH,aACA1F,SAASA,GACTrH,KAAK+b,GAKZ,QAAS+Q,KACJ9Q,GACDjF,EAAEhR,OAAM,2BACLgH,aACA1F,SAASA,GACTrH,KAAKic,GA8JZ,QAASqK,GAAQhI,GAEb,GAAIyO,GAAiBhW,EAAEhR,OAAM,2BACxByW,MACDtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAC/BpJ,IAAI,SAAStM,EAAEnF,GACZ,OACIwJ,IAAKrE,EAAEqE,IACPkhB,KAAMvlB,EAAEulB,KACRhjB,QAASvC,EAAEuC,QACX7B,OAAQV,EAAEU,OAAO4D,OAAO,SAAStE,EAAEnF,GAC/B,MAAOwf,GAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,IAAM8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,KAEnE0O,eAAgBjmB,EAAEimB,kBAIlCD,GAAehgB,aAAa1F,SAASA,GAAUrH,KAAKohB,GAGpDyL,IACAC,IA9UJ,GAAIhnB,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,EAkB5G,IAjBAvE,EAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAM0B,GAEhBoE,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAGvDA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAWA,QAE/E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAItC4e,EAAMvmB,SAASiB,GAAE,UAAY,SAAS+d,GAClCgI,EAAQhI,KAIZ5X,EAAI0a,EAAMpc,SACVmJ,EAAIiT,EAAMtH,QAGV,IAAI5T,GAAOJ,EAAUK,UAAS,0BAA2BjD,MAAMA,IAC3DkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,6BAA6BD,OAAM,KACnFyQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAEjC,IAAI2mB,GAAa9Q,EAAO9V,OAAM,KAAMC,KAAI,QAAU,WAClD2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBAAiBD,OAAM,QAC5D4mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBAElB6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,eAG/Ckc,IAGDC,EAAO1c,MAAMzB,GAEbwS,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAEa,WAAnByK,EACAjnB,EAAKH,OAAM,kBACNQ,KAAI,YAAc,eAAiB/B,EAAgB,KAC9B,QAAnB2oB,IACFxK,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,IAG5GC,EAAKH,OAAM,kBACNQ,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,OAlB1D0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAsB9Cf,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEnEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAIvD0e,IACAC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAKD,EAAOC,KAAMC,IAAIF,EAAOE,MACrCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAGxCnM,EAAEhR,OAAM,iCACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpB4c,EACKpb,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAE9C,IAAI2G,GAAYrM,EAAEhR,OAAM,iBACnByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WA+C/C,IA3CIX,GACAC,EACK7U,MAAMR,GACNid,OAAO/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAC/CwZ,UAAUlY,EAAiB,GAGhCwX,GACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAC
hDwZ,UAAWnY,EAAgB,GA0BpCwS,EAAEhR,OAAM,2BACHQ,KAAI,YAAc,eAAiB/B,EAAkB,KAKtDyoB,EAIG,CACHpH,EAAM7f,MAAMzB,GACZwS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,gBAAmB/B,EAAkBL,EAAOsR,OAASoQ,EAAM1hB,SAASE,KAAO,KAC7FmY,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,YAC1Czc,KAAK6lB,EACV,IAAIvH,GAASuH,EAAMG,MAAMC,QAAUJ,EAAMjN,UAAYiN,EAAMG,MAAM1H,QACnD,QAAXA,GACCgI,EAAQhI,OAXZ8E,GAAUpjB,KAAKohB,GACfyL,IACAC,GAgBJpK,GAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGVmQ,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtD4gB,EAAMF,iBACN,IAAI2C,GAAa7C,EAAY8C,EAAgBC,IAmC7C,IAlCA7gB,EACKmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAAazR,EAAOgiB,iBAEtCnkB,QAAQ,SAASmC,EAAOpJ,GACrB,GAAI0c,GAAS2O,EAAepH,EAAMG,MAAMC,QAAUJ,EAAM7gB,SAASO,SAAWsgB,EAAMG,MAAM1H,SAAY5X,EAAEnB,SAClG6nB,EAAgBpiB,EAAOvD,OAAO4D,OAAO,SAAStE,EAAEnF,GAGhD,MAAG0c,GAAO,IAAMA,EAAO,GACZ8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,IAAM8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,GAExD8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,IAAM8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,IAIvE0C,GAAapiB,EAAG4I,kBAAkB4lB,EAAe5sB,EAAE0E,YAAakc,EAAM1a,IACtE,IAAIwH,GAAQkf,EAAcpM,GACtBqM,EAAc3rB,EAAMyM,IAAID,EAAO8S,EACf,QAAhBqM,GACAjM,EAAML,eAAenf,EAAGof,GAAY,GAE1Bve,SAAVyL,IACgBzL,SAAhBohB,IAA2BA,EAAc3V,GACtBzL,SAAnBqhB,IAA8BA,EAAiBpiB,EAAMsD,SAAStD,EAAMgF,IAAIwH,EAAM8S,KAClF+C,EAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOyiB,EACPniB,MAAOA,EAAMF,EAAOA,EAAOqY,aAC3BngB,KAAMgL,OAId6V,EAAQ/iB,OAAS,EAAG,CACpB,GAAIgjB,GAAStiB,EAAMoY,SAAStU,OAAOhF,EAAE+C,QACjC0gB,EAAe3b,KAAKC,IAAI7G,EAAMoY,SAASvU,SAAS,GAAK7D,EAAMoY,SAASvU,SAAS,IAC7EkD,EAAY,IAAOwb,EACnBrb,EAAmBhK,EAAG4J,kBAAkBub,EAAQ1Q,IAAI,SAAStM,GAAG,MAAOA,GAAE6D,QAASoZ,EAAOvb,EACpE,QAArBG,IACAmb,EAAQnb,GAAkBqC,WAAY,GAG9C,GAAIqiB,GAAwB,SAASvmB,EAAEnF,GACnC,MAAY,OAALmF,EAAY,MAAQkV,EAAM7E,aAAarQ,GAGlDmc,GAAiBpkB,QACZuL,eAAe6Y,EAAiBpkB,QAAQuL,kBAAoBijB,GAC5DpqB,MACG0H,MAAOlJ,EAAMgF,IAAKmd,EAAY7C,GAC9BhZ,MAAOgZ,EACPhW,OAAQ+Y,MAGhBb,EAAiBre,gBAAgBif,KAIrCZ,EAAiB5jB,SAASiB,GAAE,eAAiB,SAASC,GAClD,GAAIsjB,GAAgBC,IAEpB7gB,GAAKmI,OAAO,SAASL,EAAQpJ,GAEzB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAChB5T,QAAQ,SAASmC,GAChB,GAAIgW,GAAapiB,EAAG4I,kBAAkBwD,EAAOvD,OAAQjH,EAAE0E,YAAaxD,EAAMgF,KACtEwH,EAAQlD,EAAOvD,OAAOuZ,EAC1B,IAAqB,mBAAV9S,GAAX,CAC8B,mBAAnB4V,KAAgCA,EAAiBpiB,EAAMsD,SAAStD,EAAMgF,IAAIwH,EAAM8S,IAC3F,IAAIuM,GAAO7rB,EAAMoY,SAASpY,EAAMyM,IAAID,EAAM8S,GAC1C+C,GAAQ1hB,MACJ6L,MAAOA,EACP8S,WAAYA,EACZnV,KAAMiY,EAAgByJ,GACtBlK,YAAarY,EAAOqY,YACpBrY,OAAQA,OAIhBoW,EAAM9hB,SAASqG,aAAaoe,KAGhCb,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpD4gB,EAAMF,oBAGV5hB,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,UAA4BvZ,EAAKlC,SAAWR,EAAEic,SAASzb,SAChEkC,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAGjC+O,EAAM8L,SAAWjc,EAAEic,UAEvB/a,EAAMqR,aAgDd1B,EAAYS,UAAS,uBACdpQ,EAlaX,GAoBMgF,GACAyH,EArBFiT,EAAQxiB,EAAGI,OAAO4H,OAChBmV,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClB4L,EAAS9jB,EAAGI,OAAO0jB,SACnBQ,EAAmBtkB,EAAGkE,uBACtBhE,EAAUF,EAAGI,OAAOF,UACpB+mB,EAAQjnB,EAAGI,OAAO6mB,MAAMjnB,EAAGI,OAAO4H,QAGpCzC,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZzX,EAAQtM,EAAGG,MAAMuQ,eACjBtJ,EAAQ,KACRC,EAAS,KACTwc,GAAa,EACb0K,EAAiB,MACjBrR,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClB2G,GAA0B,EAG1BgK,GAAc,EACdtc,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,cAAe,cAAe,aACnF+H,EAAW,GAIjB0U,GAAM9E,OAAM,UAAWW,YAAY,GACnCqE,EAAMhF,OAAOqF,EAAkB,QAAU,QAEzC8E,EAAMf,UAAS,GAAMhZ,SAAS,GAE9BvI,EAAQuL,eAAe,SAAStD,EAAGnF,GAC/B,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAC9B0I,gBAAgB,SAASvD,EAAGnF,GAC3B,MAAOma,GAAM3E
,aAAarQ,EAAGnF,KAGjCshB,EAAiBpkB,QAAQuL,eAAe,SAAStD,EAAGnF,GAChD,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAC9B0I,gBAAgB,SAASvD,EAAGnF,GAC3B,MAAOma,GAAM3E,aAAarQ,EAAGnF,IAQjC,IAAIyP,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,GAE7Cya,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,cAKjDoF,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACSlO,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MAudhD,OAhHAwf,GAAM9hB,SAASiB,GAAE,2BAA6B,SAASoc,GAC/CA,EAAI3R,OAAOgiB,gBACXluB,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAIjCsc,EAAM9hB,SAASiB,GAAE,0BAA4B,SAASoc,GAClD7d,EAAQgG,QAAO,KAQnBpD,EAAMpC,SAAWA,EACjBoC,EAAM0f,MAAQA,EACd1f,EAAMghB,OAASA,EACfhhB,EAAMmkB,MAAQA,EACdnkB,EAAMqa,MAAQA,EACdra,EAAM8rB,OAAS3H,EAAM9J,MACrBra,EAAMua,MAAQA,EACdva,EAAM+rB,OAAS5H,EAAM5J,MACrBva,EAAMwhB,iBAAmBA,EACzBxhB,EAAM5C,QAAUA,EAChB4C,EAAMiP,MAAQA,EACdjP,EAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9E4lB,gBAAiBtf,IAAK,WAAW,MAAOsf,IAAkBrf,IAAK,SAASvG,GAAG4lB,EAAe5lB,IAC1FuU,WAAiBjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAChFyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9Ewa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAErE0lB,aAAiBpf,IAAK,WAAW,MAAOof,IAAenf,IAAK,SAASvG,GAAG0lB,EAAY1lB,IACpFmmB,aAAkB7f,IAAK,WAAW,MAAOgY,GAAM5f,UAAY6H,IAAK,SAASvG,GAAGse,EAAM5f,OAAOsB,KACzFomB,gBAAoB9f,IAAK,WAAW,MAAOgY,GAAM/J,aAAehO,IAAK,SAASvG,GAAGse,EAAM/J,UAAUvU,KACjGqmB,gBAAoB/f,IAAK,WAAW,MAAOgY,GAAM7J,aAAelO,IAAK,SAASvG,GAAGse,EAAM7J,UAAUzU,KACjG2e,aAAcrY,IAAK,WAAW,MAAOgY,GAAMK,eAAiBpY,IAAK,SAASvG,GAAGse,EAAMK,YAAY3e,KAG/FsmB,aAAchgB,IAAK,WAAW,MAAOgY,GAAM1hB,QAAS2J,IAAK,SAASvG,GAChD9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBwhB,EAAM1hB,OAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASmQ,EAAM1hB,OAAOuR,MACvEmQ,EAAM1hB,OAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAASoQ,EAAM1hB,OAAOsR,OACvEoQ,EAAM1hB,OAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASyhB,EAAM1hB,OAAOC,OAE3ED,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClB+Z,EAAM/Z,SAASA,GACfwe,EAAMxe,SAASA,GACf0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,GACbkW,EAAMlW,MAAMA,GACZ2a,EAAM3a,MAAMA,KAEhB2b,aAAchZ,IAAK,WAAW,MAAOuT,GAAMyF,eAAiB/Y,IAAK,SAASvG,GACtE6Z,EAAMyF,YAAYtf,GAClBse,EAAMgB,YAAYtf,KAEtBuf,aAAcjZ,IAAK,WAAW,MAAOkO,GAAM3E,cAAgBtJ,IAAK,SAASvG,GACrEwU,EAAM3E,WAAW7P,GACjBse,EAAMiB,YAAYvf,KAEtBwf,aAAclZ,IAAK,WAAW,MAAOoO,GAAM7E,cAAgBtJ,IAAK,SAASvG,GACrE0U,EAAM7E,WAAW7P,GACjBse,EAAMkB,YAAYxf,KAEtBb,GAAImH,IAAK,WAAW,MAAOuT,GAAM1a,KAAOoH,IAAK,SAASvG,GAClD6Z,EAAM1a,EAAEa,GACRse,EAAMnf,EAAEa,KAEZ4G,GAAIN,IAAK,WAAW,MAAOuT,GAAMjT,KAAOL,IAAK,SAASvG,GAClD6Z,EAAMjT,EAAE5G,GACRse,EAAM1X,EAAE5G,KAEZ+U,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQqF,EAAkB,QAAU,UAE9C2G,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,EAA0B1b,EACtB0b,IACA7B,EAAMN,aAAY,GAC
lBM,EAAMoD,YAAW,QAK7B5lB,EAAGG,MAAMkW,eAAevT,EAAO0f,GAC/BxiB,EAAGG,MAAMqP,YAAY1M,GAEdA,GAGX9C,EAAGI,OAAO8uB,mBAAqB,WAC7B,MAAOlvB,GAAGI,OAAO4tB,YACdzoB,QAASsR,OAAQ,KACjBwX,aAAa,ICviBlBruB,EAAGI,OAAO+uB,iBAAmB,WACzB,YA0GA,SAASrsB,GAAMsB,GAkaX,MAjaAA,GAAUC,KAAK,SAASC,GA+QpB,QAAS4iB,GAAW/e,GAChB,GAAIvG,KAAW,KAALuG,GACNL,EAAIlG,EAAI,EAAI,GACZ2N,EAAI6f,EAAmB,CAC3B,OAAO,IAAM,GAAMtnB,EAAK,IAAMyH,EACxB,YAAc3N,EAAI,IAAO,IAAMkG,EAAK,KAAOyH,EAAI,GAC/C,KAAO,EAAIA,EAAI,GACf,YAAc3N,EAAI,IAAM,GAAMkG,EAAK,IAAO,EAAIyH,EAC9C,KACO,IAAMzH,EAAK,KAAOyH,EAAI,GAC7B,KAAO,EAAIA,EAAI,GACf,IAAO,IAAMzH,EAAK,KAAOyH,EAAI,GAC7B,KAAO,EAAIA,EAAI,GAIzB,QAAS4X,KACAC,EAAMC,SAASD,EAAM1H,OAAO4H,GACjCC,GACKjjB,MAAM8iB,EAAMC,QAAUgI,EAAG1oB,SAAW2gB,IACpCjjB,KAAK,SAAS8D,EAAEnF,GACb,GAAIwkB,GAAY6H,EAAGlnB,EAAE,IAAMknB,EAAG5oB,QAAQ,GAClCghB,EAAa4H,EAAG5oB,QAAQ,GAAK4oB,EAAGlnB,EAAE,GACtC3H,IAAG2G,OAAOpG,MAAMoG,OAAM,SACjBQ,KAAI,QAAuB,EAAZ6f,EAAgB,EAAIA,GAExChnB,GAAG2G,OAAOpG,MAAMoG,OAAM,UACjBQ,KAAI,IAAM0nB,EAAGlnB,EAAE,KACfR,KAAI,QAAuB,EAAb8f,EAAiB,EAAIA,KAIpD,QAASC,KACLJ,EAAcF,EAAMC,QAAU,KAAOD,EAAM1H,SAC3CA,EAAS0H,EAAMC,QAAUgI,EAAG1oB,SAAWygB,EAAM1H,SAC7Chf,EAAS0mB,OAAO1H,OAAQA,EAAQ0H,MAAOA,IACvCD,IAGAjB,EACK9e,MAAMzB,GACN0B,OAAOioB,GACPhjB,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAAYvZ,EAAKtB,GAAGusB,OAElE/M,EACKpb,MAAMzB,GACN0B,OAAOioB,GACPhjB,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAAavZ,EAAKtB,GAAGusB,MAEnE,IAAIC,GAAgBrX,GAAEhR,OAAM,0BACvByW,MAAO6R,GAASrtB,OACbqtB,GACKhb,IAAI,SAAStM,EAAEnF,GACZ,OACIwJ,IAAKrE,EAAEqE,IACP3D,OAAQV,EAAEU,OAAO4D,OAAO,SAAStE,EAAEnF,GAC/B,MAAOkjB,GAAKpe,IAAIK,EAAEnF,IAAM0c,EAAO,IAAMwG,EAAKpe,IAAIK,EAAEnF,IAAM0c,EAAO,UANrD7W,aAY5BslB,EAAiBhW,GAAEhR,OAAM,2BACxByW,MAAM8R,EAAYC,MAAe9mB,YAC3B8mB,GACCljB,OAAO,SAASmjB,GAAY,OAAQA,EAAS/R,WAC7CpJ,IAAI,SAAStM,EAAEnF,GACX,OACI0qB,KAAMvlB,EAAEulB,KACRH,YAAaplB,EAAEolB,YACfD,YAAanlB,EAAEmlB,YACf9gB,IAAKrE,EAAEqE,IACP3D,OAAQV,EAAEU,OAAO4D,OAAO,SAAStE,EAAEnF,GAC/B,MAAOwf,GAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,IAAM8C,EAAM1a,IAAIK,EAAEnF,IAAM0c,EAAO,QAQnF5X,GADA2nB,GAASrtB,SAAWytB,EAChB3J,EAAK9f,SAELoc,EAAMpc,SAGd+W,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUwR,EAAkB,GAEjCnS,EAAMxW,QAAQ+C,KAAKyiB,KAAKzM,EAAO,IAAKhW,KAAKwB,MAAMwU,EAAO,MAEtDvH,GAAEhR,OAAM,iBAAkBgH,aAAa1F,SAASmkB,GAC3CxrB,KAAK+b,GAGVqS,EAAcrhB,aAAa1F,SAASmkB,GAAoBxrB,KAAK8kB,GAC7DiI,EAAehgB,aAAa1F,SAASmkB,GAAoBxrB,KAAKohB,GAG9DrK,GAAEhR,OAAM,2BACHQ,KAAI,YAAc,eAAiBimB,EAAGnnB,QAAQ,GAAK,KAExDqpB,EACKxnB,MAAMslB,GACN7I,OAAQ/kB,EAAGG,MAAM2U,WAAWwa,EAAiB,GAAIhrB,IACjDwZ,UAAUnY,EAAgB,GAC/BkpB,EACKvmB,MAAMynB,GACNhL,OAAQ/kB,EAAGG,MAAM2U,WAAWwa,EAAiB,GAAIhrB,IAGlDurB,EAGAhB,EAAO/Q,SAAS6R,GAAUvtB,OAAS,GAAKuD,EAAgB,GAFxDkpB,EAAO/Q,SAAS2R,GAASrtB,OAAS,GAAKuD,EAAgB,EAM3D,IAAIqqB,GAAcP,GAASrtB,OAAS,EAAI,EACpC6tB,EAAeN,GAAUvtB,SAAWstB,EAAYC,IAAa,EAAI,EAEjEO,EAAYL,EAAmBI,EAAeD,EAC9CG,EAAYN,EAAmBG,EAAcC,CAEjD9X,IAAEhR,OAAM,4BACHsD,MAAK,UAAYylB,GACtB/X,GAAEhR,OAAM,4BACHsD,MAAK,UAAY0lB,GACjBxoB,KAAI,YAAc,aAAeG,EAAErB,QAAQ,GAAK,OAErD0R,GAAEhR,OAAM,4BAA6BgH,aAAa1F,SAASmkB,GACtDxrB,KAAK0uB,GACV3X,GAAEhR,OAAM,4BAA6BgH,aAAa1F,SAASmkB,GACtDxrB,KAAKytB,GAzZd,GAAI3nB,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3D+pB,EAAmBtvB,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IACxD8oB,EAAcS,EAAc,GACnCM,EAAmBN,EAAcsB,EAAQ3qB,IAAM2qB,EAAQvZ,MAa3D,IAXA/T,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa1F,SAASmkB,GAAoBxrB,
KAAK0B,IACrFA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAItC,IAAIonB,IAAWnrB,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,UAAY1V,EAAEonB,MAC7DI,GAAYrrB,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAEonB,KAGhDznB,GADA2nB,GAASrtB,SAAWytB,EAChB3J,EAAK9f,SAELoc,EAAMpc,SAGdipB,EAAKT,EAAOtmB,QAGZslB,EAAKiC,EAAmBrN,EAAMtH,SAAWgL,EAAKhL,SAC9C6U,EAAKF,EAAmB3J,EAAKhL,SAAWsH,EAAMtH,SAC9CmV,EAAKR,EAAmBS,EAAOpV,SAAWqV,EAAMrV,SAChDsV,EAAKX,EAAmBU,EAAMrV,SAAWoV,EAAOpV,QAEhD,IAAIuV,IAAUnsB,EACTmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAAagS,GAAoB1nB,EAAEonB,IAAMpnB,EAAEonB,OAC1E9a,IAAI,SAAStM,GACV,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,QAIzC0tB,GAAUpsB,EACTmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAAagS,EAAmB1nB,EAAEonB,KAAOpnB,EAAEonB,OAC1E9a,IAAI,SAAStM,GACV,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,OAI7C8E,GAAErB,OAAO,EAAGd,IAEZ0pB,EAAK1oB,OAAOnG,GAAGkf,OAAOlf,GAAGmf,MAAM8Q,GAAQ/uB,OAAOgvB,KAAW,SAASvoB,GAAK,MAAOA,GAAEL,KAC3ErB,OAAO,EAAGd,GAGf,IAAI2B,IAAOJ,EAAUK,UAAS,4BAA6BjD,MAAMA,IAC7DkZ,GAASlW,GAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,+BAA+BD,OAAM,KACrFyQ,GAAI7Q,GAAKH,OAAM,IAEnBqW,IAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAGjC,IAAI2mB,IAAa9Q,GAAO9V,OAAM,KAAMC,KAAI,QAAU,WAClD2mB,IAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBACrC2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBACrC2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,eACrC2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,eAGrC,IAAIgpB,IAAenT,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aAapD,IAZAgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,gBACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,iBACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,iBACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,eACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,gBACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,sBACvCgpB,GAAajpB,OAAM,KAAMC,KAAI,QAAU,iBAMlCkc,EAEE,CACH,GAAI6H,IAAc5H,EAAOkH,QAAUrlB,EAAiB,EAAIA,EACpDirB,GAAkB9M,EAAOkH,QAAUU,GAAc,CAErD5H,GAAO1c,MAAMskB,IAEbvT,GAAEhR,OAAM,kBACHyW,MAAMtZ,EAAKmQ,IAAI,SAASrI,GAOrB,MANAA,GAAOykB,YAAqChtB,SAAvBuI,EAAOykB,YAA4BzkB,EAAOI,IAAMJ,EAAOykB,YACzEhB,EACCzjB,EAAOI,IAAMJ,EAAOykB,aAAezkB,EAAOmjB,IAAMuB,EAAsBC,GAEtE3kB,EAAOI,IAAMJ,EAAOykB,aAAezkB,EAAOmjB,IAAMwB,EAAqBD,GAElE1kB,KAEVhL,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SAEpBioB,EAAmBtvB,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,GAAUupB,GAG7E3W,GAAEhR,OAAM,kBACHQ,KAAI,YAAc,aAAeipB,GAAkB,KAAQrrB,EAAOE,IAAK,SA1B5E0S,IAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,QA6B9Cf,IAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAOvE0S,GAAEhR,OAAM,eAAgBsD,MAAK,UAAY4jB,EAAc,UAAY,QAEnEkC,EACKnpB,MAAMzB,GACN0B,OAAO+nB,GACP9iB,MAAMhI,EAAKmQ,IAAI,SAAUtM,EAAGnF,GACzB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAUtE,EAAGnF,GACnB,OAAQsB,EAAKtB,GAAG6a,UAAYvZ,EAAKtB,GAAGusB,OAE5Ce,EACKlpB,MAAMzB,GACN0B,OAAO+nB,GACP9iB,MAAMhI,EAAKmQ,IAAI,SAAUtM,EAAGnF,GACzB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAUtE,EAAGnF,GACnB,OAAQsB,EAAKtB,GAAG6a,WAAavZ,EAAKtB,GAAGusB,MAG7C,IAAIyB,IAAY7Y,GAAEhR,OAAM,4BACnByW,MAAM6R,GAASrtB,OAASqtB,KACpB5mB,aAELooB,GAAa9Y,GAAEhR,OAAM,6BACpByW,MAAM8R,EAAYC,MACV9mB,YACF8mB,GAAUljB,OAAO,SAASmjB,GACxB,OAAQA,EAAS/R,WAG9B1F,IAAEhR,OAAM,eACHQ,KAAI,YAAc,gBAAmB2nB,EAAmB/pB,EAAOsR,OAASuZ,EAAQ3qB,KAAO,KAE5FurB,GAAU7i
B,aAAa/M,KAAKmvB,GAC5BU,GAAW9iB,aAAa/M,KAAKkvB,GAGzBvB,IACAH,EACK7J,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAiB,IAAKrB,IAClDwZ,UAAUsR,EAAkB,GACjCjX,GAAEhR,OAAM,6BACHQ,KAAI,YAAc,eAAiB0oB,EAAG5pB,QAAQ,GAAK,KACxD0R,GAAEhR,OAAM,6BAA8BgH,aACjC/M,KAAKwtB,IAGVI,IACAkC,EACK5oB,MAAM+nB,GACNtL,OAAQqK,EAAmB,IAC3BtR,UAAWnY,EAAgB,GAChCwrB,EACK7oB,MAAMkoB,GACNzL,OAAQqK,EAAmB,IAC3BtR,SAAS2R,GAASrtB,OAAS,GAAKuD,EAAgB,GAErDwS,GAAEhR,OAAM,8BACHsD,MAAK,UAAYglB,GAASrtB,OAAS,EAAI,GACvCuF,KAAI,YAAc,eAAiB0nB,EAAG5oB,QAAQ,GAAK,KACxD0R,GAAEhR,OAAM,8BACHsD,MAAK,UAAYklB,GAAUvtB,OAAS,EAAI,GACxCuF,KAAI,YAAc,aAAe0nB,EAAG5oB,QAAQ,GAAK,OAEtD0R,GAAEhR,OAAM,8BAA+BgH,aAClC/M,KAAK8vB,GACV/Y,GAAEhR,OAAM,8BAA+BgH,aAClC/M,KAAK+vB,IAId/J,EAAMtf,EAAEunB,GAAI1tB,GAAE,QAAU+lB,GAEpBJ,GAAaF,EAAM1H,OAAO4H,EAE9B,IAAIC,IAAUpP,GAAEhR,OAAM,uBAAwBI,UAAS,KAClDjD,MAAMgjB,GAAeF,EAAM1H,WAE5BoI,GAAeP,GAAQ9f,QACtBC,OAAM,IAEXogB,IAAapgB,OAAM,QACdC,KAAI,QAAU,QACdA,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,SAAWynB,GAEpBtH,GAAapgB,OAAM,QACdC,KAAI,QAAU,SACdA,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,SAAWynB,EAEpB,IAAIrH,IAAS5P,GAAEhR,OAAM,kBAChB/F,KAAKgmB,EACVW,IAAOxgB,UAAS,QAEXI,KAAI,SAAWynB,GACpBrH,GAAOxgB,UAAS,WAAYG,OAAM,QAASC,KAAI,IAAMuf,GAMrDpD,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAIVzT,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAEjC+O,EAAM8L,SAAWjc,EAAEic,UAEvB/a,EAAMqR,WAsJVuT,MAIG5kB,EAtgBX,GA4BM4c,GAEA5X,EACAunB,EACAzB,EACAmC,EACAM,EACAG,EAnCFhO,EAAQxiB,EAAGI,OAAO4H,OAChBsoB,EAAStwB,EAAGI,OAAO4H,OACnBke,EAAOlmB,EAAGI,OAAOosB,gBACjB+D,EAAQvwB,EAAGI,OAAOosB,gBAClBrP,EAAQnd,EAAGI,OAAO8X,OAClB0W,EAAS5uB,EAAGI,OAAO8X,OACnB4X,EAAS9vB,EAAGI,OAAO8X,OACnB2W,EAAS7uB,EAAGI,OAAO8X,OACnBgZ,EAASlxB,EAAGI,OAAO8X,OACnBiZ,EAASnxB,EAAGI,OAAO8X,OACnB4L,EAAS9jB,EAAGI,OAAO0jB,SACnBsD,EAAQ5mB,GAAG0V,IAAIkR,QACflnB,EAAUF,EAAGI,OAAOF,UAGtBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZqM,GAAW3qB,IAAK,EAAGqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAChD4B,EAAQ,KACRC,EAAS,KACT4S,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BjD,EAAQtM,EAAGG,MAAMuQ,eACjBmT,GAAa,EACbwK,GAAc,EACdW,GAAiB,EACjBD,GAAiB,EACjBD,EAAc,GAEdxH,EAAc,KAOdvQ,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,QAAU,cAAe,eAC/CksB,EAAqB,EACrB7a,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACf4N,EAAqB,eACrBD,EAAsB,gBACtBjB,GAAmB,CAGzBrN,GAAMf,UAAS,GACf6O,EAAOpO,aAAY,GAEnBoO,EAAOtI,YAAY,SAAS7f,GAAK,OAAO,IACxCgV,EAAM9E,OAAM,UAAWW,YAAY,GACnC8W,EAAOzX,OAAM,QACbwW,EAAOxW,OAAM,SACbuW,EAAOvW,OAAM,UAAWW,YAAY,GACpCkY,EAAO7Y,OAAM,QACb8Y,EAAO9Y,OAAM,SAEbnY,EAAQsL,eAAc,GAAME,gBAAgB,SAASvD,EAAGnF,GACpD,MAAOma,GAAM3E,aAAarQ,EAAGnF,IAOjC,IAAIouB,GAAc,WACd,MAAOvB,IACCwB,KAAMxC,EAAQ5H,MAAOkK,IACrBE,KAAMvB,EAAQ7I,MAAOiK,IAG7BI,EAAe,WACf,MAAOzB,IACCwB,KAAMvB,EAAQ7I,MAAOiK,IACrBG,KAAMxC,EAAQ5H,MAAOkK,IAG7BjO,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,cAKjDoF,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACSlO,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,OAK5C0sB,EAAc,SAASprB,GACzB,MAAOA,GAAKgP,MAAM,SAASlH,GACzB,MAAOA,GAAOyR,WAojBlB,OAvIA2E,GAAM9hB,SAASiB,GAAE,2BAA6B,SAASoc,GACnD7d,EACKuI,SAAS,KACTgD,eAAe,SAAStD,EAAGnF,GACxB,MAAOsuB,KAAeD,KAAK7Y,aAAarQ,EAAGnF,KAE9CsB,KAAKyZ,GACL7X,QAAO,KAGhBsc,EAAM9hB,SAASiB,GAAE,0BAA4B,SAASoc,GAClD7d,EAAQgG,QAAO,KAGnBggB,EAAKxlB,SAASiB,GAAE,2BAA6B,SAASoc,GAClDA,EAAI/R,MAAQlJ,EAAMgF,IAAIiW,EAAIzZ,MAC1ByZ,EAAW,QACP/R,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,OAEfpM,EACKuI,SAAS,GACTgD,eAAe,SA
AStD,EAAGnF,GACxB,MAAOouB,KAAcC,KAAK7Y,aAAarQ,EAAGnF,KAE7CsB,KAAKyZ,GACL7X,QAAO,KAGhBggB,EAAKxlB,SAASiB,GAAE,0BAA4B,SAASoc,GACjD7d,EAAQgG,QAAO,KAGnBggB,EAAKxlB,SAASiB,GAAE,2BAA6B,SAASoc,GAClD7d,MAWJ4C,EAAMpC,SAAWA,EACjBoC,EAAMghB,OAASA,EACfhhB,EAAM0f,MAAQA,EACd1f,EAAMwtB,OAASA,EACfxtB,EAAMojB,KAAOA,EACbpjB,EAAMytB,MAAQA,EACdztB,EAAMqa,MAAQA,EACdra,EAAM8rB,OAASA,EACf9rB,EAAMgtB,OAASA,EACfhtB,EAAM+rB,OAASA,EACf/rB,EAAMouB,OAASA,EACfpuB,EAAMquB,OAASA,EACfruB,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9E2e,aAAiBrY,IAAK,WAAW,MAAOqY,IAAepY,IAAK,SAASvG,GAAG2e,EAAY3e,IACpFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrE0lB,aAAiBpf,IAAK,WAAW,MAAOof,IAAenf,IAAK,SAASvG,GAAG0lB,EAAY1lB,IACpFmmB,aAAiB7f,IAAK,WAAW,MAAO6f,IAAe5f,IAAK,SAASvG,GAAGmmB,EAAYnmB,IACpFomB,gBAAoB9f,IAAK,WAAW,MAAO8f,IAAkB7f,IAAK,SAASvG,GAAGomB,EAAepmB,IAC7FqmB,gBAAoB/f,IAAK,WAAW,MAAO+f,IAAkB9f,IAAK,SAASvG,GAAGqmB,EAAermB,IAC7FooB,oBAAwB9hB,IAAK,WAAW,MAAO8hB,IAAsB7hB,IAAK,SAASvG,GAAGooB,EAAmBpoB,IACzGmoB,qBAAyB7hB,IAAK,WAAW,MAAO6hB,IAAuB5hB,IAAK,SAASvG,GAAGmoB,EAAoBnoB,IAG5GpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DypB,aAAchgB,IAAK,WAAW,MAAOmhB,IAAWlhB,IAAK,SAASvG,GAC1DynB,EAAQ3qB,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAAS2qB,EAAQ3qB,IAC7D2qB,EAAQtZ,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASsZ,EAAQtZ,MAC7DsZ,EAAQvZ,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAASuZ,EAAQvZ,OAC7DuZ,EAAQ5qB,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAAS4qB,EAAQ5qB,OAEjEiD,UAAWwG,IAAK,WAAW,MAAO2d,IAAsB1d,IAAK,SAASvG,GAClEikB,EAAqBjkB,IAEzB2D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,KAEjBxE,GAAImH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAC7CsR,EAAOtR,EACP6Z,EAAM1a,EAAEa,GACR2nB,EAAOxoB,EAAEa,GACTud,EAAKpe,EAAEa,GACP4nB,EAAMzoB,EAAEa,KAEZ4G,GAAIN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAC7CkZ,EAAOlZ,EACP6Z,EAAMjT,EAAE5G,GACR2nB,EAAO/gB,EAAE5G,GACTud,EAAK3W,EAAE5G,GACP4nB,EAAMhhB,EAAE5G,KAEZknB,kBAAsB5gB,IAAK,WAAW,MAAO4gB,IAAoB3gB,IAAK,SAASvG,GAE3E,GAAGknB,IAAqBlnB,EAAG,CACvB,GAAIilB,GAAKkC,CACTA,GAASjB,EACTA,EAASjB,CAET,IAAIyC,GAAKa,CACTA,GAASC,EACTA,EAASd,EAEbR,EAAiBlnB,EAEjBmnB,EAAOzX,OAAM,QACbwW,EAAOxW,OAAM,SACb6Y,EAAO7Y,OAAM,QACb8Y,EAAO9Y,OAAM,aAIrBrY,EAAGG,MAAMkW,eAAevT,EAAO0f,GAC/BxiB,EAAGG,MAAMqP,YAAY1M,GAEdA,GC1pBX9C,EAAGI,OAAOmxB,SAAW,WACjB,YA2CA,SAASzuB,GAAMsB,GAyUX,MAxUAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,EACjB,IAAIsqB,GAAoB,CAkBxB,IAVGC,GAAYntB,EAAKlC,SAAQqvB,IACxB5oB,OAAQvE,EAAK,GAAGuE,OAAO4L,IAAI,SAAStM,GAC5B,OACIL,EAAGK,EAAEL,EACLyH,EAAG,EACHnD,OAAQjE,EAAEiE,OACVsD,KAAM,UAIlBgiB,EAAS,CACT,GAAIC,GAASnxB,GAAGmoB,OAAOiJ,QAClBviB,OAAOwiB,GACPhpB,OAAO,SAASV,GAAI,MAAOA,GAAEU,SAC7B0G,EAAEsS,IACLvd,EAAKlC,QAAUqvB,EAAWA,EAAWntB,EAEvCqtB,GAAO1nB,QAAQ,SAASmC,EAAQpJ,GAExBoJ,EAAO0lB,cACPxtB,EAAKtB,GAAG+uB,mBAAqBP,IAC7BG,EAAO3uB,GAAKsB,EAAKtB,IAGbA,EAAI,GAAK2uB,EAAO3uB,EAAI,GAAG8uB,cACvBH,EAAO3uB,GAAG6F,OAAO4L,IAAI,SAAStM,EAAEqT,GAC5BrT,EAAE4d,IAAM4L,EAAO
3uB,EAAI,GAAG6F,OAAO2S,GAAGjM,EAChCpH,EAAEylB,GAAKzlB,EAAE4d,GAAK5d,EAAEoH,MAKhCjL,EAAOqtB,EAGXrtB,EAAK2F,QAAQ,SAASmC,EAAQpJ,GAC1BoJ,EAAOvD,OAAOoB,QAAQ,SAASqF,GAC3BA,EAAMlD,OAASpJ,EACfsM,EAAM9C,IAAMJ,EAAOI,QAKvBklB,GAAWptB,EAAKlC,OAAS,GACzBkC,EAAK,GAAGuE,OAAO4L,IAAI,SAAStM,EAAEnF,GAC1B,GAAIgvB,GAAU,EAAGC,EAAU,CAC3B3tB,GAAKmQ,IAAI,SAAStM,EAAGod,GACjB,IAAKjhB,EAAKihB,GAAKuM,aAAc,CACzB,GAAIpW,GAAIvT,EAAEU,OAAO7F,EACjB0Y,GAAEhM,KAAOhG,KAAKC,IAAI+R,EAAEnM,GAChBmM,EAAEnM,EAAE,GACJmM,EAAEkS,GAAKqE,EACPA,GAAoBvW,EAAEhM,OAGtBgM,EAAEkS,GAAKlS,EAAEhM,KAAOsiB,EAChBA,GAAoBtW,EAAEhM,UAS1C,IAAIoW,GAAc9L,GAAWI,KACzB9V,EAAKmQ,IAAI,SAAStM,EAAGod,GACjB,MAAOpd,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,GAAI+iB,GAAI5d,EAAE4d,GAAI6H,GAAIzlB,EAAEylB,GAAIrI,IAAIA,MAIzEzd,GAAEnB,OAAOqT,GAAWxZ,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAAK,MAAOA,GAAEL,KAC/DzB,WAAW6T,IAAW,EAAGvU,GAAiBusB,GAE/C3iB,EAAE5I,OAAOyT,GAAW5Z,GAAGkf,OAAOlf,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAC5D,GAAIxB,GAASwB,EAAEoH,CASf,OAPImiB,KAAYptB,EAAK6D,EAAEod,KAAKuM,eAEpBnrB,EADAwB,EAAEoH,EAAI,EACGpH,EAAEylB,GAEFzlB,EAAEylB,GAAKzlB,EAAEoH,GAGnB5I,IACRjF,OAAO4f,KACT7a,MAAM0U,IAAWvV,EAAiB,IAG/BkC,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,KAC7BmB,EAAEnB,SAAS,GACPmB,EAAEnB,QAAQmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,GAAWmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,KACzEmB,EAAEnB,QAAM,GAAK,KAEnB4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,KAC7B4I,EAAE5I,SAAS,GACP4I,EAAE5I,QAAQ4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,GAAW4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,KACzE4I,EAAE5I,QAAM,GAAK,KAEvB8Z,EAAKA,GAAM3Y,EACXie,EAAKA,GAAMxW,CAGX,IAAIjI,GAAOJ,EAAUK,UAAS,yBAA0BjD,MAAMA,IAC1DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,4BACnD8V,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEgY,EAAU/V,OAAM,YACXC,KAAI,KAAO,gBAAkB4C,GAC7B7C,OAAM,QACXJ,EAAKH,OAAM,iBAAoBoD,EAAK,SAC/B5C,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpBuS,EAAExQ,KAAI,YAAc8Z,EAAW,qBAAuBlX,EAAK,IAAM,GAEjE,IAAI0b,GAAS3e,EAAKH,OAAM,cAAeI,UAAS,aAC3CjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,EAAEnF,GAAK,MAAOA,IAC3DijB,GAAOxe,QAAQC,OAAM,KAChB+C,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,KAE3B,IAAI0nB,GAAiB1f,EAChBtE,WAAW8X,EAAO7d,OAAOb,UAAS,eAAiB,eAAgBmC,KAAKF,IAAI,IAAKf,IACjFd,KAAI,IAAM,SAASQ,EAAGnF,EAAGwY,GACtB,GAAIsJ,GAAOiB,EAAG,IAAM,CAMpB,OALI2L,IACIptB,EAAK6D,EAAEiE,UAAY9H,EAAK6D,EAAEiE,QAAQ0lB,eAClChN,EAAOiB,EAAG5d,EAAE4d,KAGbjB,IAEVnd,KAAI,SAAW,GACfU,QACD8pB,GAAe/jB,OACf+jB,EAAe/jB,MAAM,SAASjG,EAAEnF,GAC5B,GAAIoL,GAAQpL,GAAKyF,GAAY2pB,EAAkB,IAAMpvB,CACrD,OAAOoL,KAEf6X,EACKte,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,sBAAwBA,IAC7D0H,QAAO,QAAU,SAASvC,GAAK,MAAOA,GAAEsT,QACxChR,MAAK,OAAS,SAAStC,EAAEnF,GAAI,MAAOsJ,GAAMnE,EAAGnF,KAC7CyH,MAAK,SAAW,SAAStC,EAAEnF;AxB/M5C,AwB+MgD,ExB/M9C,CAAC,GAAG,AwB+MiDsJ,CxB/MhD,EwB+MsDnE,EAAGnF,CxB/MpD,CAAC,GwBgNDijB,EACKxb,CxBjNE,CAAC,IwBiNE,GxBjNK,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,AwBiNA,GACxBA,MxBlNiC,AwBkN5B,eAAiB8iB,EAE3B,IAAIrH,GAAOD,EAAO1e,UAAS,eACtBjD,KAAK,SAAS6D,GAAK,MAAQspB,KAAantB,EAAKlC,OAAUqvB,EAAS5oB,OAASV,EAAEU,QAChFqd,GAAK9d,OAAOC,QAEI6d,GAAKze,QAAQC,OAAM,QAC1BC,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,kBAAoB,oBACzE2E,KAAI,IAAM,SAASQ,EAAEnF,EAAEwY,GACpB,MAAOkW,KAAYptB,EAAKkX,GAAGsW,aAAe,EAAKtW,EAAI1T,EAAEpB,YAAcpC,EAAKlC,SAE3EuF,KAAI,IAAM,SAASQ,EAAEnF,EAAEwY,GAAK,MAAOuK,GAAG2L,IAAYptB,EAAKkX,GAAGsW,aAAe3pB,EAAE4d,GAAK,IAAM,IACtFpe,KAAI,SAAW,GACfA,KAAI,QAAU,SAASQ,EAAEnF,EAAEwY,GAAK,MAAO1T,GAAEpB,aAAegrB,IAAYptB,EAAKkX,GAAGsW,aAAe,EAAIxtB,EAAKlC,
UACpGuF,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAEnF,IAAM,OAEhFkjB,GACKzb,MAAK,OAAS,SAAStC,EAAEnF,EAAEwY,GAAI,MAAOlP,GAAMnE,EAAGqT,EAAGxY,KAClDyH,MAAK,SAAW,SAAStC,EAAEnF,EAAEwY,GAAI,MAAOlP,GAAMnE,EAAGqT,EAAGxY,KACpDrB,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL1X,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACL1B,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBACLvC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,QAAU,SAASwG,EAAEnF,GACpB,GAAIojB,GAAUrlB,IACdL,GAASqG,cACLzC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5B1F,MAAOvE,GAAGuE,MACVqhB,QAASA,IAEb5lB,GAAGuE,MAAMshB,oBAEZ1kB,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAAS4lB,iBACLhiB,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,UAEhCjK,GAAGuE,MAAMshB,oBAEjBH,EACKve,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,kBAAoB,oBACzE2E,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAEnF,IAAM,QAExEqvB,IACKxU,IAAUA,EAAWvZ,EAAKmQ,IAAI,WAAa,OAAO,KACvDyR,EACKzb,MAAK,OAAS,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOhb,IAAG8xB,IAAID,EAASlqB,EAAEnF,IAAIuvB,OAAS1U,EAASpJ,IAAI,SAAStM,EAAEnF,GAAK,MAAOA,KAAKyJ,OAAO,SAAStE,EAAEnF,GAAI,OAAQ6a,EAAS7a,KAAOwY,IAAOvL,aACpKxF,MAAK,SAAW,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOhb,IAAG8xB,IAAID,EAASlqB,EAAEnF,IAAIuvB,OAAS1U,EAASpJ,IAAI,SAAStM,EAAEnF,GAAK,MAAOA,KAAKyJ,OAAO,SAAStE,EAAEnF,GAAI,OAAQ6a,EAAS7a,KAAOwY,IAAOvL,aAG/K,IAAIuiB,GACAtM,EAAK1T,gBAAgBC,EAAa,WAAY/I,KAAKF,IAAI,IAAKf,IACvD2F,MAAM,SAASjG,EAAEnF,GACd,MAAOA,GAAIyF,EAAWnE,EAAK,GAAGuE,OAAOzG,QAE7CsvB,GACAc,EACK7qB,KAAI,IAAM,SAASQ,EAAEnF,EAAEwY,GACpB,GAAIsJ,GAAO,CAeX,OATQA,GAJHxgB,EAAKkX,GAAGsW,aAGLjQ,EAAK1Z,EAAEnF,GAAK,EACLuM,EAAE,GAELA,EAAE,GAAKA,EAAEsS,EAAK1Z,EAAEnF,IAAM,GACfuM,EAAE,GAAK,EAEPA,EAAEsS,EAAK1Z,EAAGnF,KAAO,EARzBuM,EAAEpH,EAAEylB,MAclBjmB,KAAI,SAAW,SAASQ,EAAEnF,EAAEwY,GACzB,MAAKlX,GAAKkX,GAAGsW,aAGFpoB,KAAKL,IAAIK,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAK,IAAM,EAF9C7F,KAAKL,IAAIK,KAAKC,IAAI4F,EAAEpH,EAAEoH,EAAEpH,EAAE4d,IAAMxW,EAAEpH,EAAE4d,KAAM,KAKxDpe,KAAI,IAAM,SAASQ,EAAEnF,EAAEwY,GACpB,GAAIpU,GAAQ,CAOZ,OANI9C,GAAKkX,GAAGsW,eACR1qB,EAAQe,EAAEiE,OAAStE,EAAEpB,YAAcpC,EAAKlC,OACpCkC,EAAKlC,SAAWovB,IAChBpqB,EAAQ9C,EAAKkX,GAAGuW,mBAAqBjqB,EAAEpB,aAA+B,EAAlB8qB,KAGrDpqB,IAEVO,KAAI,QAAU,SAASQ,EAAEnF,EAAEwY,GACxB,GAAKlX,EAAKkX,GAAGsW,aAEN,CAEH,GAAI1qB,GAASU,EAAEpB,YAAc8qB,CAM7B,OAHIltB,GAAKlC,SAAWovB,IAChBpqB,EAAQU,EAAEpB,aAA+B,EAAlB8qB,IAEpBpqB,EATP,MAAOU,GAAEpB,cAcrB8rB,EACK7qB,KAAI,IAAM,SAASQ,EAAEnF,GAClB,MAAOmF,GAAEiE,OAAStE,EAAEpB,YAAcpC,EAAKlC,SAE1CuF,KAAI,QAAUG,EAAEpB,YAAcpC,EAAKlC,QACnCuF,KAAI,IAAM,SAASQ,EAAEnF,GAClB,MAAO6e,GAAK1Z,EAAEnF,GAAK,EACfuM,EAAE,GACEA,EAAE,GAAKA,EAAEsS,EAAK1Z,EAAEnF,IAAM,EAC1BuM,EAAE,GAAK,EACPA,EAAEsS,EAAK1Z,EAAEnF,KAAO,IAEvB2E,KAAI,SAAW,SAASQ,EAAEnF,GACvB,MAAO0G,MAAKL,IAAIK,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAI,IAAM,IAKhEkR,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,OAGHxV,EAAK,IAAMA,EAAK,GAAGuE,SACnBupB,EAAkB9tB,EAAK,GAAGuE,OAAOzG,UAKzCqQ,EAAYS,UAAS,sBAEdpQ,EA9WX,GAgBM+a,GAEA7D,EACAI,EACAF,EACAiB,EAUFsF,EAAIsF,EA/BJxgB,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTS,EAAItH,GAAG8H,MAAMsI,UACbrB,EAAI/O,GAAG8H,MAAMC,SACbgC,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZ+S,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9B+R,GAAU,GACVG,GAAW,EACXiQ,GAAU,EACVG,EAAc,OACdvlB,EAAQtM,EAAGG,M
AAMuQ,eACjB+gB,GAAW,EACXY,EAAW,KAEX5pB,EAAW,IAKXypB,EAAe,GACf3E,EAAc,IACd7sB,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAAoB,aAQnI+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,GAG/C2pB,EAAkB,CAiYtB,OA/CAtvB,GAAMpC,SAAWA,EAEjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAU6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACjEtB,QAAU4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACnEb,GAAUmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAC/D4G,GAAUN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC/DvC,QAAU6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IACzDuS,QAAUjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IACzDqR,SAAU/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACrEyR,SAAUnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACrEuR,QAAUjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACnEwS,QAAUlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACnE2Y,QAAUrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACnE+oB,SAAUziB,IAAK,WAAW,MAAOyiB,IAAWxiB,IAAK,SAASvG,GAAG+oB,EAAQ/oB,IACrEkpB,aAAc5iB,IAAK,WAAW,MAAO4iB,IAAe3iB,IAAK,SAASvG,GAAGkpB,EAAYlpB,IACjF8Y,UAAcxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IAC3EkV,UAAc5O,IAAK,WAAW,MAAO4O,IAAY3O,IAAK,SAASvG,GAAGkV,EAASlV,IAC3E4B,IAAc0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC/D8oB,UAAcxiB,IAAK,WAAW,MAAOwiB,IAAYviB,IAAK,SAASvG,GAAG8oB,EAAS9oB,IAC3EupB,cAAcjjB,IAAK,WAAW,MAAOijB,IAAgBhjB,IAAK,SAASvG,GAAGupB,EAAavpB,IACnF4kB,aAActe,IAAK,WAAW,MAAOse,IAAere,IAAK,SAASvG,GAAG4kB,EAAY5kB,IAGjFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,KAEtB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9B0pB,UAAYpjB,IAAK,WAAW,MAAOojB,IAAYnjB,IAAK,SAASvG,GACzD0pB,EAAW1pB,EAAI3I,EAAGG,MAAMsQ,SAAS9H,GAAK,SAI9C3I,EAAGG,MAAMqP,YAAY1M,GAEdA,GC5aX9C,EAAGI,OAAOqyB,cAAgB,WACtB,YAsHA,SAAS3vB,GAAMsB,GA8TX,MA7TAqO,GAAYW,QACZX,EAAYrS,OAAOsyB,GACfxV,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAoBlE,IAlBAzC,EAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAK0B,GAEfoE,EAAUiH,aACL1F,SAASA,GACTrH,KAAK0B,IAElBA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAItCP,EAAI4qB,EAAStsB,SACbmJ,EAAImjB,EAASxX,QAGb,IAAI5T,GAAOJ,EAAUK,UAAS,mCAAoCjD,MAAMA,IACpEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,sCAAsCD,OAAM,KAC5FyQ,EAAI7Q,EAAKH,OAAM,IA6BnB,IA3BAqW,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,mBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBAG5Bkc,GAGDC,EAAO1c,MAAMzB,EAAiBgtB,KAE9Bxa,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAELC
,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE4S,EAAEhR,OAAM,kBACHQ,KAAI,YAAc,aAAegrB,IAAiB,KAAQptB,EAAOE,IAAK,MAd3E0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAkBzC2b,EAEE,CACH,GAAIC,KACEzX,IAAKomB,EAAcC,SAAW,UAAWhV,SAAU6U,EAAShB,YAC5DllB,IAAKomB,EAAclB,SAAW,UAAW7T,UAAW6U,EAAShB,WAGnExN,GAAS9c,MAAMurB,KAAgBrmB,OAAK,OAAU,OAAQ,SACtD6L,EAAEhR,OAAM,oBACHyW,MAAMqG,GACNtc,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,KACjDrE,KAAK8iB,OAXT/L,GAAEhR,OAAM,oBAAqBI,UAAS,KAAMc,QAcjDf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KACnEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAI3D+sB,EACK7U,SAASvZ,EAAKmQ,IAAI,SAASrI,GAAU,MAAOA,GAAOyR,YACnDzW,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAG9C,IAAIF,GAAWxF,EAAEhR,OAAM,gBAClByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAK/C,IAHAF,EAASvc,KAAKsxB,GAGVxV,EAAW,CACXC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUlY,EAAiB,GAEhCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KACvD0R,EAAEhR,OAAM,iBACH/F,KAAK+b,EAEV,IAAI/D,GAASjB,EAAEhR,OAAM,qBAAsBI,UAAS,IAMpD,IAJA6R,EACK7R,UAAS,cACTkD,MAAK,UAAY,GAElBiP,EAAe,CACf,GAAIoZ,GAAe,SAAShrB,EAAEyH,GAC1B,MAAO,aAAezH,EAAI,IAAMyH,EAAI,KAGpCwjB,EAAY,EAAGC,EAAc,EAEjC5Z,GACK7R,UAAS,QACTI,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAC5B,MAAQsX,GAAa,EAAItX,EAAI,GAAK,EAAIuX,EAAYC,IAG1D,IAAIC,GAAsBzyB,GAAG+G,UAAS,mCAAoC,GAAGnF,MAC7E+V,GAAE5Q,UAAS,qCACNI,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO8vB,GAAa,EAAU,IAAN9vB,GAAWiwB,EAAsB,IAAM,EAAKD,EAAcD,KAI1FpM,GACAxO,EAAE5Q,UAAS,cACNnG,KAAKpB,EAAGG,MAAM+W,UAAWpU,EAAMqa,MAAMzW,aAG1CwsB,GACA9Z,EACK3M,OAAO,SAAStE,EAAEnF,GACf,MAAOA,GAAI0G,KAAKyiB,KAAK7nB,EAAK,GAAGuE,OAAOzG,QAAUuD,EAAiB,QAAU,IAE5E4B,UAAS,cACTkD,MAAK,UAAY,GAEvB6O,GACCF,EACK7R,UAAS,cACTI,KAAI,YAAc,UAAY2R,EAAe,SAC7C7O,MAAK,cAAgB6O,EAAe,EAAI,QAAU,OAE3DnB,EAAEhR,OAAM,iBAAkBI,UAAS,wBAC9BkD,MAAK,UAAY,GAGtB2S,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBACH/F,KAAKic,IAIVgH,IACAC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAKD,EAAOC,KAAMC,IAAIF,EAAOE,MACrCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAOxCR,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGV+P,EAASxjB,SAASiB,GAAE,cAAgB,SAASwG,EAAEnF,GAC3C,GAAKmF,EAAE0V,SAAP,CAOA,OANAoG,EAAeA,EAAaxP,IAAI,SAASwB,GAErC,MADAA,GAAE4H,UAAW,EACN5H,IAEX9N,EAAE0V,UAAW,EAEL1V,EAAEqE,KACN,IAAK,UACL,IAAKomB,GAAcC,QACfH,EAAShB,SAAQ,EACjB,MACJ,KAAK,UACL,IAAKkB,GAAclB,QACfgB,EAAShB,SAAQ,GAIzB3f,EAAM2f,QAAUgB,EAAShB,UACzBhxB,EAASoiB,YAAY/Q,GACrBjP,EAAMqR,YAIVzT,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAEjC+O,EAAM8L,SAAWjc,EAAEic,UAEE,mBAAdjc,GAAE8vB,UACTgB,EAAShB,QAAQ9vB,EAAE8vB,SACnB3f,EAAM2f,QAAU9vB,EAAE8vB,QAClBA,EAAU9vB,EAAE8vB,SAEhB5uB,EAAMqR,WAGNkQ,GACAC,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtD,GAAqBiC,QAAjBjC,EAAE0E,YAAN,CAEA,GAAI2e,GAAa7C,EAAY8C,EAAgBI,EAAQH,IACrD7gB,GACKmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAElB5T,QAAQ,SAASmC,EAAOpJ,GACrBof,EAAata,EAAEnB,SAASwM,QAAQvR,EAAE0E,YAElC,IAAIgJ,GAAQlD,EAAOvD,OAAOuZ,EACZve,UAAVyL,IAEJgW,EAAShW,EAAMxH,EACKjE,SAAhBohB,IAA2BA,EAAc3V,GACtBzL,SAAnBqhB,IAA8BA,EAAiBtjB,EAAE8C,QACrDygB,EAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOlJ,EAAMyM,IAAID,EAAO8S,GACxB9V,MAAOA,EAAMF,EAAOA,EAAOqY,aAC3BngB,KAAM8H,EAAOvD,OAAOuZ,QAIhCkC,EAAiBpkB,QACZoE,MACG0H,
MAAOsZ,EACPlc,MAAOgZ,EACPhW,OAAQ+Y,MAGhBb,EAAiBre,gBAAgBif,MAGrCZ,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpD0iB,EAAiBpkB,QAAQgG,QAAO,OAIpCwsB,EAAShyB,SAASiB,GAAE,2BAA6B,SAASoc,GACtDA,EAAI/R,MAAQlJ,EAAMgF,IAAIiW,EAAIzZ,MAC1ByZ,EAAW,QACPvR,IAAKuR,EAAIzZ,KAAKkI,IACdR,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,OAEfpM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7BwsB,EAAShyB,SAASiB,GAAE,0BAA4B,SAASoc,GACrD7d,EAAQgG,QAAO,KAGnBwsB,EAAShyB,SAASiB,GAAE,2BAA6B,SAASoc,GACtD7d,SAKZuS,EAAYS,UAAS,2BACdpQ,EA9aX,GAwBMgF,GACAyH,EAzBFmjB,EAAW1yB,EAAGI,OAAOmxB,WACnBpU,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClBoM,EAAmBtkB,EAAGkE,uBACtB4f,EAAS9jB,EAAGI,OAAO0jB,SACnBI,EAAWlkB,EAAGI,OAAO0jB,SACrB5jB,EAAUF,EAAGI,OAAOF,UAGtBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMuQ,eACjBsT,GAAe,EACf4O,KACA/O,GAAa,EACb3G,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClBwV,GAAe,EACfxZ,GAAgB,EAChBiN,GAAa,EACbrN,EAAe,EAGfvH,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,aACrDiyB,EAAe,WAAa,MAAO3O,GAAe,IAAM,GACxDvb,EAAW,IACX4b,GAA0B,CAGhCtS,GAAM2f,SAAU,EAEhBgB,EAAShB,SAAQ,GACjBvU,EACK9E,OAAM,UACNW,YAAY,GACZD,YAAW,GACXP,WAAW,SAASrQ,GAAK,MAAOA,KAErCkV,EACKhF,OAAM,EAAqB,QAAU,QACrCG,WAAWhY,GAAGmM,OAAM,SAGzBzM,EACKuI,SAAS,GACTgD,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAEhC0I,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGrCshB,EAAiBpkB,QACZuL,eAAe,SAAStD,EAAGnF,GACxB,MAAY,OAALmF,EAAY,MAAQkV,EAAM7E,aAAarQ,EAAGnF,KAEpD0I,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGrCshB,EAAiBpkB,QACZuL,eAAe,SAAUtD,EAAGnF,GACzB,MAAY,OAALmF,EAAY,MAAQkV,EAAM7E,aAAarQ,EAAGnF,KAEpD0I,gBAAgB,SAAUvD,EAAGnF,GAC1B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGrCshB,EAAiBpkB,QACZuI,SAAS,GACTgD,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAEhC0I,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGrCkhB,EAASyB,aAAY,EAMrB,IAAIlT,GAAczS,EAAGG,MAAMsS,YAAY/R,GACnCgxB,GAAU,EAEVxO,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,WACzC6T,QAASA,KAKjBzO,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACUlO,SAAlBkO,EAAM2f,UACNA,EAAU3f,EAAM2f,SACC7tB,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MA4YhD,OAjEAF,GAAMpC,SAAWA,EACjBoC,EAAM4vB,SAAWA,EACjB5vB,EAAMghB,OAASA,EACfhhB,EAAMohB,SAAWA,EACjBphB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAMiP,MAAQA,EACdjP,EAAM5C,QAAUA,EAChB4C,EAAMwhB,iBAAmBA,EAEzBxhB,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9Eqb,cAAe/U,IAAK,WAAW,MAAO+U,IAAgB9U,IAAK,SAASvG,GAAGqb,EAAarb,IACpFiqB,eAAgB3jB,IAAK,WAAW,MAAO2jB,IAAiB1jB,IAAK,SAASvG,GAAGiqB,EAAcjqB,IACvFuU,WAAiBjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAChFyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9Ewa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrEuqB,cAAkBjkB,IAAK,WAAW,MAAOikB,IAAgBhkB,IAAK,SAASvG,GAAGuqB,EAAavqB,IACvF2Q,cAAkBrK,IAAK,WAAW,MAAOqK,IAAgBpK,IAAK,SAASvG,GAAG2Q,EAAa3Q,IACvF+Q,eAAmBzK,IAAK,WAAW,MAAOyK,IAAiBxK,IAAK,SAASvG,GAAG+Q,EAAc/Q,IAC1Fge,YAAe1X,IAAK,WAAW,MAAO0X,IAAczX,IAAK,SAASvG,GAAGge,IAAahe,IAGlFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAA
OsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX+pB,EAASjqB,SAASA,GAClB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,GACfgK,EAAYW,MAAM3K,KAEtB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,KAEjBoR,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQqF,EAAkB,QAAU,UAE9C2G,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,EAA0B1b,IAE9B0pB,UAAYpjB,IAAK,WAAW,MAAOyjB,GAASL,UAAYnjB,IAAK,SAASvG,GAClE+pB,EAASL,SAAS1pB,GAClBmb,EAAOxX,MAAM,SAASnE,EAAEnF,GAAI,MAAOxC,IAAG8xB,IAAG,QAASC,OAAW,IAAJvvB,GAASiN,iBAI1EjQ,EAAGG,MAAMkW,eAAevT,EAAO4vB,GAC/B1yB,EAAGG,MAAMqP,YAAY1M,GAEdA,GC7fX9C,EAAGI,OAAO+yB,mBAAqB,WAC3B,YA2CA,SAASrwB,GAAMsB,GAkQX,MAjQAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,GAEbwqB,IACAptB,EAAO9D,GAAGmoB,OAAOiJ,QACZviB,OAAM,QACNxG,OAAO,SAASV,GAAI,MAAOA,GAAEU,SAC7B0G,EAAEsS,GACNvd,IAGLA,EAAK2F,QAAQ,SAASmC,EAAQpJ,GAC1BoJ,EAAOvD,OAAOoB,QAAQ,SAASqF,GAC3BA,EAAMlD,OAASpJ,EACfsM,EAAM9C,IAAMJ,EAAOI,QAKvBklB,GACAptB,EAAK,GAAGuE,OAAO4L,IAAI,SAAStM,EAAEnF,GAC1B,GAAIgvB,GAAU,EAAGC,EAAU,CAC3B3tB,GAAKmQ,IAAI,SAAStM,GACd,GAAIuT,GAAIvT,EAAEU,OAAO7F,EACjB0Y,GAAEhM,KAAOhG,KAAKC,IAAI+R,EAAEnM,GAChBmM,EAAEnM,EAAE,GACJmM,EAAEkS,GAAKqE,EAAUvW,EAAEhM,KACnBuiB,GAAoBvW,EAAEhM,OAGtBgM,EAAEkS,GAAKoE,EACPA,GAAoBtW,EAAEhM,SAOtC,IAAIoW,GAAc9L,GAAWI,KACzB9V,EAAKmQ,IAAI,SAAStM,GACd,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,GAAI+iB,GAAI5d,EAAE4d,GAAI6H,GAAIzlB,EAAEylB,OAIjE9lB,GAAEnB,OAAOqT,GAAWxZ,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAAK,MAAOA,GAAEL,KAC/DzB,WAAW6T,IAAW,EAAGtU,GAAkBssB,GAEhD3iB,EAAE5I,OAAOyT,GAAW5Z,GAAGkf,OAAOlf,GAAGmf,MAAMmG,GAAYrR,IAAI,SAAStM,GAAK,MAAOupB,GAAWvpB,EAAEoH,EAAI,EAAIpH,EAAEylB,GAAKzlB,EAAEoH,EAAIpH,EAAEylB,GAAOzlB,EAAEoH,IAAK7N,OAAO4f,KAEjI0E,IAAe0L,EACfniB,EAAE9I,MAAM0U,IAAY5L,EAAE5I,SAAS,GAAK,EAAIysB,EAAe,EAAIztB,GAAkB4J,EAAE5I,SAAS,GAAK,EAAIysB,EAAe,KAEhH7jB,EAAE9I,MAAM0U,IAAW,EAAGxV,IAE1B8a,EAAKA,GAAM3Y,EACXie,EAAKA,GAAMvlB,GAAG8H,MAAMC,SAAS5B,OAAO4I,EAAE5I,UAAUF,OAAO8I,EAAE,GAAGA,EAAE,IAG9D,IAAIjI,GAAO9G,GAAG2G,OAAOpG,MAAMwG,UAAS,mCAAoCjD,MAAMA,IAC1EkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,sCAEnD6V,GADYhW,EAAUE,OAAM,QACnBF,EAAUE,OAAM,KACrBJ,GAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAEvE,IAAIwgB,GAAS3e,EAAKH,OAAM,cAAeI,UAAS,aAC3CjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,EAAEnF,GAAK,MAAOA,IAC3DijB,GAAOxe,QAAQC,OAAM,KAChB+C,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,MAC3Bwb,EAAO7d,OAAOoK,gBAAgBC,EAAa,mCACtChI,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,MACtBpC,SACL4d,EACKte,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,sBAAwBA,IAC7D0H,QAAO,QAAU,SAASvC,GAAK,MAAOA,GAAEsT,QACxChR,MAAK,OAAS,SAAStC,EAAEnF,GAAI,MAAOsJ,GAAMnE,EAAGnF,KAC7CyH,MAAK,SAAW,SAAStC,EAAEnF,GAAI,MAAOsJ,GAAMnE,EAAGnF,KACpDijB,EAAOzT,gBAAgBC,EAAa,8BAC/BhI,MAAK,iBAAmB,GACxBA,MAAK,eAAiB8iB,EAE3B,IAAIrH,GAAOD,EAAO1e,UAAS,YACtBjD,KAAK,SAAS6D,GAAK,MAAOA,GAAEU,QACjCqd,GAAK9d,OAAOC,QAEZ,IAAI8d,GAAYD,EAAKze,QAAQC,OAAM,KAC9BC,KAAI,YAAc,SAASQ,EAAEnF,EAAEwY,GAC5B,MAAO,aAAeuK,EAAG2L,EAAUvpB,EAAE4d,GAAK,GAAK,KAAO2L,EAAU,EAAKlW,EAAI1T,EAAEpB,YAAcpC,EAAKlC,OAAW0F,EAAEmS,EAAK9R,EAAEnF,KAAO,KAGjImjB,GAAUze,OAAM,QACXC,KAAI,QAAU,GACdA,KAAI,SAAWG,EAAEpB,aAAegrB,EAAU,EAAIptB,EAAKlC,SAExD8jB,EACKvkB,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,G
AAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsb,kBACL1X,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASsF,iBACL1B,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAASsF,iBACL1B,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBACLvC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAGnC9I,GAAE,QAAU,SAASwG,EAAEnF,GACpB,GAAIojB,GAAUrlB,IACdL,GAASqG,cACLzC,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5B1F,MAAOvE,GAAGuE,MACVqhB,QAASA,IAEb5lB,GAAGuE,MAAMshB,oBAEZ1kB,GAAE,WAAa,SAASwG,EAAEnF,GACvBtC,EAAS4lB,iBACLhiB,KAAM6D,EACNiB,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,UAEhCjK,GAAGuE,MAAMshB,oBAGbgN,EAAQ/uB,EAAK,GAAG,KAChB6hB,EAAUze,OAAM,YAEhBwe,EAAK/e,OAAM,YACNQ,KAAI,OAAS,QACbA,KAAI,SAAW,SAASQ,EAAEnF,GACvB,GAAIswB,GAAOD,EAAQlrB,EAAEnF,GACfuwB,EAAM,GAAMzrB,EAAEpB,aAA6C,GAA7BgrB,EAAU,EAAIptB,EAAKlC,QACvDkxB,GAAOA,EAAKlxB,OAASkxB,IAAS5pB,KAAKC,IAAI2pB,GAAO5pB,KAAKC,IAAI2pB,IACvDA,EAAOA,EAAK7e,IAAI,SAAS7S,GAAK,MAAO2N,GAAE3N,GAAK2N,EAAE,IAC9C,IAAIO,KAAMwjB,EAAK,IAAIC,IAAOD,EAAK,GAAGC,IAAOD,EAAK,GAAG,IAAKA,EAAK,GAAG,IAAKA,EAAK,IAAIC,IAAOD,EAAK,GAAGC,GAC3F,OAAOzjB,GAAE2E,IAAI,SAAU+e,GAAQ,MAAOA,GAAK5b,KAAI,OAASA,KAAI,OAE/DjQ,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,GAAIuwB,GAAMzrB,EAAEpB,aAA6C,GAA7BgrB,EAAU,EAAIptB,EAAKlC,QAC/C,OAAO,cAAgByf,EAAK1Z,EAAEnF,GAAK,EAAI,EAAIuM,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAM,KAAOgkB,EAAM,OAI3FpN,EAAUze,OAAM,QAEZse,IAAe0L,GACfxL,EAAK/e,OAAM,QACNQ,KAAI,cAAgB,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,MAAQ,UACnE2E,KAAI,IAAMG,EAAEpB,aAA6B,EAAdpC,EAAKlC,SAChCuF,KAAI,KAAO,SACXuK,KAAK,SAAS/J,EAAEnF,GACb,GAAIgT,GAAIuQ,EAAY1E,EAAK1Z,EAAEnF,IACrBywB,EAAOJ,EAAQlrB,EAAEnF,EACvB,OAAaa,UAAT4vB,EACOzd,EACNyd,EAAKrxB,OAEH4T,EAAI,IAAMuQ,EAAY7c,KAAKC,IAAI8pB,EAAK,KAAO,IAAMlN,EAAY7c,KAAKC,IAAI8pB,EAAK,KADvEzd,EAAI,IAAMuQ,EAAY7c,KAAKC,IAAI8pB,MAGlDvN,EAAK1T,gBAAgBC,EAAa,4BAC7BtL,OAAM,QACNQ,KAAI,IAAM,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,GAAKuM,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,GAAK,KAEjF2W,EAAK3e,UAAS,QAAS2K,KAAI,IAG3BwhB,IAAkBhC,GAClBvL,EAAUze,OAAM,QAASgD,QAAO,gBAAgB,GAChDwb,EAAK/e,OAAM,qBACNQ,KAAI,cAAgB,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,QAAU,QACrE2E,KAAI,IAAMG,EAAEpB,aAA6B,EAAdpC,EAAKlC,SAChCuF,KAAI,KAAO,SACXuK,KAAK,SAAS/J,EAAEnF,GAAK,MAAOiX,GAAK9R,EAAEnF,KACxCkjB,EAAK1T,gBAAgBC,EAAa,4BAC7BtL,OAAM,qBACNQ,KAAI,IAAM,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAIuM,EAAE,GAAKA,EAAEsS,EAAK1Z,EAAEnF,IAAM,EAAI,MAGhFkjB,EAAK3e,UAAS,qBAAsB2K,KAAI,IAG5CgU,EACKve,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,GAAK,EAAI,kBAAoB,oBAE1EqvB,IACKxU,IAAUA,EAAWvZ,EAAKmQ,IAAI,WAAa,OAAO,KACvDyR,EACKzb,MAAK,OAAS,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOhb,IAAG8xB,IAAID,EAASlqB,EAAEnF,IAAIuvB,OAAS1U,EAASpJ,IAAI,SAAStM,EAAEnF,GAAK,MAAOA,KAAKyJ,OAAO,SAAStE,EAAEnF,GAAI,OAAQ6a,EAAS7a,KAAOwY,IAAOvL,aACpKxF,MAAK,SAAW,SAAStC,EAAEnF,EAAEwY,GAAK,MAAOhb,IAAG8xB,IAAID,EAASlqB,EAAEnF,IAAIuvB,OAAS1U,EAASpJ,IAAI,SAAStM,EAAEnF,GAAK,MAAOA,KAAKyJ,OAAO,SAAStE,EAAEnF,GAAI,OAAQ6a,EAAS7a,KAAOwY,IAAOvL,cAG3KyhB,EACAxL,EAAK1T,gBAAgBC,EAAa,4BAC7B9K,KAAI,YAAc,SAASQ,EAAEnF,GAC1B,MAAO,aAAeuM,EAAEpH,EAAEylB,IAAM,IAAM9lB,EAAEmS,EAAK9R,EAAEnF,IAAM,MAExDmE,OAAM,QACNQ,KAAI,QAAU,SAASQ,EAAEnF,GACtB,MAAO0G,MAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,GAAKmF,EAAE4d,IAAMxW,EAAEpH,EAAE4d,MAAQ,IAErDpe,KAAI,SAAWG,EAAEpB,aAEtBwf,EAAK1T,gBAAgBC,EAAa,4BAC7B9K,KAAI,YAAc,SAASQ,EAAEnF,GAE1B,MAAO,aACcuM,EAAhBsS,EAAK
1Z,EAAEnF,GAAK,EAAM6e,EAAK1Z,EAAEnF,GAAQ,GAChC,KACDmF,EAAEiE,OAAStE,EAAEpB,YAAcpC,EAAKlC,OAE7B0F,EAAEmS,EAAK9R,EAAEnF,KACX,MAETmE,OAAM,QACNQ,KAAI,SAAWG,EAAEpB,YAAcpC,EAAKlC,QACpCuF,KAAI,QAAU,SAASQ,EAAEnF,GACtB,MAAO0G,MAAKL,IAAIK,KAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,IAAI,IAAM,IAIhEkR,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,SAIXrH,EAAYS,UAAS,gCACdpQ,EAvSX,GAaM+a,GASA7D,EACAI,EACAF,EACAiB,EASFsF,EAAIsF,EAlCJxgB,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZY,EAAItH,GAAG8H,MAAMsI,UACbrB,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9B8jB,EAAU,SAASlrB,GAAK,MAAOA,GAAEwrB,MACjCrS,GAAU,GACVhV,EAAQtM,EAAGG,MAAMuQ,eACjB2hB,EAAW,KAEXX,GAAU,EACV1L,GAAa,EACb0N,GAAgB,EAChBN,EAAe,GACflB,EAAe,GACf3E,EAAc,IACdhH,EAAc/lB,GAAGmM,OAAM,QAMvBlE,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAAoB,aAQrI+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA6TjD,OAlDA3F,GAAMpC,SAAWA,EAEjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAU6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACjEtB,QAAU4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACnEb,GAAUmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAC/D4G,GAAUN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC/DgrB,MAAa1kB,IAAK,WAAW,MAAOokB,IAAWnkB,IAAK,SAASvG,GAAG0qB,EAAQ1qB,IACxEvC,QAAU6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IACzDuS,QAAUjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IACzDqR,SAAU/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACrEyR,SAAUnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACrEuR,QAAUjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACnEwS,QAAUlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACnE2Y,QAAUrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACnE+oB,SAAUziB,IAAK,WAAW,MAAOyiB,IAAWxiB,IAAK,SAASvG,GAAG+oB,EAAQ/oB,IACrEqd,YAAa/W,IAAK,WAAW,MAAO+W,IAAc9W,IAAK,SAASvG,GAAGqd,EAAWrd,IAG9EkV,UAAe5O,IAAK,WAAW,MAAO4O,IAAY3O,IAAK,SAASvG,GAAGkV,EAASlV,IAC5E4B,IAAe0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAChE4d,aAAetX,IAAK,WAAW,MAAOsX,IAAerX,IAAK,SAASvG,GAAG4d,EAAY5d,IAClFyqB,cAAenkB,IAAK,WAAW,MAAOmkB,IAAgBlkB,IAAK,SAASvG,GAAGyqB,EAAazqB,IACpFupB,cAAejjB,IAAK,WAAW,MAAOijB,IAAgBhjB,IAAK,SAASvG,GAAGupB,EAAavpB,IACpF4kB,aAAete,IAAK,WAAW,MAAOse,IAAere,IAAK,SAASvG,GAAG4kB,EAAY5kB,IAGlFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,KAEtB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9B0pB,UAAYpjB,IAAK,WAAW,MAAOojB,IAAYnjB,IAAK,SAASvG,GACzD0pB,EAAW1pB,EAAI3I,EAAGG,MAAMsQ,SAAS9H,GAAK,SAI9C3I,EAAGG,MAAMqP,YAAY1M,GAEdA,GCvWX9C,EAAGI,OAAOwzB,wBAA0B,WAChC,YAuFA,SAAS9wB,GAAMsB,GA6MX,MA5MAqO,GAAYW,QACZX,EAAYrS,OAAOsyB,GACfxV,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EACjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAelE,IAbAzC,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAC3EA,EAAMoE,UAAYnG,KAElB2wB,EAAUgB,EAAShB,UAEnB3f,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,E
AAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAItCP,EAAI4qB,EAAStsB,SACbmJ,EAAImjB,EAASxX,SAASqC,OAAM,EAG5B,IAAIjW,GAAOJ,EAAUK,UAAS,wCAAyCjD,MAAMA,IACzEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,2CAA2CD,OAAM,KACjGyQ,EAAI7Q,EAAKH,OAAM,IA8BnB,IA5BAqW,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAC5BD,OAAM,KAAMC,KAAI,QAAU,eAC1BD,OAAM,QACX8V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,mBAG5Bkc,GAGDC,EAAO1c,MAAMzB,EAAiBgtB,KAE9Bxa,EAAEhR,OAAM,kBACHyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE4S,EAAEhR,OAAM,kBACHQ,KAAI,YAAc,aAAegrB,IAAiB,KAAQptB,EAAOE,IAAK,MAd3E0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAkBzC2b,EAEE,CACH,GAAIC,KACEzX,IAAKomB,EAAcC,SAAW,UAAWhV,SAAU6U,EAAShB,YAC5DllB,IAAKomB,EAAclB,SAAW,UAAW7T,UAAW6U,EAAShB,WAGnExN,GAAS9c,MAAMurB,KAAgBrmB,OAAK,OAAU,OAAQ,SACtD6L,EAAEhR,OAAM,oBACHyW,MAAMqG,GACNtc,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,KACjDrE,KAAK8iB,OAXT/L,GAAEhR,OAAM,oBAAqBI,UAAS,KAAMc,QAcjDf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAGvEitB,EACK7U,SAASvZ,EAAKmQ,IAAI,SAASrI,GAAU,MAAOA,GAAOyR,YACnDzW,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAE9C,IAAIF,GAAWxF,EAAEhR,OAAM,gBAClByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAK/C,IAHAF,EAASxP,aAAa/M,KAAKsxB,GAGvBxV,EAAW,CACXC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAUnY,EAAgB,GAE/BwS,EAAEhR,OAAM,iBAAkB/F,KAAK+b,EAE/B,IAAI/D,GAASjB,EAAEhR,OAAM,iBAAkBI,UAAS,IAEhD6R,GACK7R,UAAS,cAGd6V,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAWlY,EAAiB,GAEjCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB/B,EAAkB,KAC1DuS,EAAEhR,OAAM,iBAAkB/F,KAAKic,IAInClF,EAAEhR,OAAM,qBACHQ,KAAI,KAAO4H,EAAE,IACb5H,KAAI,KAAO4H,EAAE,IACb5H,KAAI,KAAO,GACXA,KAAI,MAAQ/B,GAOjBke,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGV+P,EAASxjB,SAASiB,GAAE,cAAgB,SAASwG,EAAEnF,GAC3C,GAAKmF,EAAE0V,SAAP,CAOA,OANAoG,EAAeA,EAAaxP,IAAI,SAASwB,GAErC,MADAA,GAAE4H,UAAW,EACN5H,IAEX9N,EAAE0V,UAAW,EAEL1V,EAAEqE,KACN,IAAK,UACL,IAAKomB,GAAcC,QACfH,EAAShB,SAAQ,EACjB,MACJ,KAAK,UACL,IAAKkB,GAAclB,QACfgB,EAAShB,SAAQ,GAIzB3f,EAAM2f,QAAUgB,EAAShB,UACzBhxB,EAASoiB,YAAY/Q,GACrB2f,EAAUgB,EAAShB,UAEnB5uB,EAAMqR,YAIVzT,EAASiB,GAAE,cAAgB,SAASC,GAEN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAGjC+O,EAAM8L,SAAWjc,EAAEic,UAGE,mBAAdjc,GAAE8vB,UACTgB,EAAShB,QAAQ9vB,EAAE8vB,SACnB3f,EAAM2f,QAAU9vB,EAAE8vB,QAClBA,EAAU9vB,EAAE8vB,SAGhB5uB,EAAMqR,aAGd1B,EAAYS,UAAS,uCACdpQ,EA9RX,GAmBMgF,GACAyH,EApBFmjB,EAAW1yB,EAAGI,OAAO+yB,qBACnBhW,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClB4L,EAAS9jB,EAAGI,OAAO0jB,SAASzc,OAAO,IACnC6c,EAAWlkB,EAAGI,OAAO0jB,SAASzc,OAAO,IACrCnH,EAAUF,EAAGI,OAAOF,UAGtBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMuQ,eACjBsT,GAAe,EACf4O,KACA/O,GAAa,EACb3G,GAAY,EACZE,GAAY,EACZsU,GAAU,EAGV3f,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,cAAgB,cAAY,aAClDiyB,EAAe,WAAa,
MAAO3O,GAAe,IAAM,GACxDvb,EAAW,GAGjBsJ,GAAM2f,SAAU,EAEhBgB,EAAShB,QAAQA,GAEjBvU,EACK9E,OAAM,QACNW,YAAY,GACZD,YAAW,GACXP,WAAW,SAASrQ,GAAK,MAAOA,KAErCkV,EACKhF,OAAM,UACNG,WAAWhY,GAAGmM,OAAM,SAGzBzM,EACKuI,SAAS,GACTgD,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAEhC0I,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAGrCkhB,EAASyB,aAAY,EAMrB,IAAIzC,GAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,WACzC6T,QAASA,KAKjBzO,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACUlO,SAAlBkO,EAAM2f,UACNA,EAAU3f,EAAM2f,SACC7tB,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,OAK5CyP,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EAkSjD,OA5EAiqB,GAAShyB,SAASiB,GAAE,2BAA6B,SAASoc,GACtDA,EAAI/R,MAAQlJ,EAAMgF,IAAIiW,EAAIzZ,MAC1ByZ,EAAW,QACPvR,IAAKuR,EAAIzZ,KAAKkI,IACdR,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,OAEfpM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7BwsB,EAAShyB,SAASiB,GAAE,0BAA4B,SAASoc,GACrD7d,EAAQgG,QAAO,KAGnBwsB,EAAShyB,SAASiB,GAAE,2BAA6B,SAASoc,GACtD7d,MAQJ4C,EAAMpC,SAAWA,EACjBoC,EAAM4vB,SAAWA,EACjB5vB,EAAMghB,OAASA,EACfhhB,EAAMohB,SAAWA,EACjBphB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAMiP,MAAQA,EACdjP,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9Eqb,cAAe/U,IAAK,WAAW,MAAO+U,IAAgB9U,IAAK,SAASvG,GAAGqb,EAAarb,IACpFiqB,eAAgB3jB,IAAK,WAAW,MAAO2jB,IAAiB1jB,IAAK,SAASvG,GAAGiqB,EAAcjqB,IACvFuU,WAAiBjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAChFyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9Ewa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAGrEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBiqB,EAASjqB,SAASA,GAClB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,KAEjB+lB,UAAYpjB,IAAK,WAAW,MAAOyjB,GAASL,UAAYnjB,IAAK,SAASvG,GAClE+pB,EAASL,SAAS1pB,GAClBmb,EAAOxX,MAAM,SAASnE,EAAEnF,GAAI,MAAOxC,IAAG8xB,IAAG,QAASC,OAAW,IAAJvvB,GAASiN,iBAI1EjQ,EAAGG,MAAMkW,eAAevT,EAAO4vB,GAC/B1yB,EAAGG,MAAMqP,YAAY1M,GAEdA,GCzXX9C,EAAGI,OAAOyzB,WAAa,WACnB,YAuDA,SAAS/wB,GAAMsB,GAmbX,MAlbAA,GAAUC,KAAK,SAASC,GA4OpB,QAASwvB,GAAe/V,GACpB,GAAIgW,GAAwC,IAAhCzvB,EAAKyZ,EAAI0G,aAAapH,MAAc2W,EAASC,CACzDlW,GAAI/R,MAAQ+R,EAAIzO,MAAMxH,EACtBiW,EAAI3R,QACAJ,MAAO+R,EAAIzO,MAAMC,EACjBjD,MAAOyR,EAAIzO,MAAMhD,MACjBE,IAAKuR,EAAI3R,OAAOI,KAEpBtM,EACKuI,SAAS,GACTiD,gBAAgB,SAASvD,EAAGnF,GAC5B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAE7ByI,eAAe,SAAStD,EAAGnF,GACxB,MAAO+wB,GAAMvb,aAAarQ,EAAGnF,KAEhCsB,KAAKyZ,GACL7X,QAAO,GAGhB,QAASguB,GAAkBnW,GACvB,GAAIgW,GAAwC,IAAhCzvB,EAAKyZ,EAAI0G,aAAapH,MAAc2W,EAASC,CACzDlW,GAAI/R,MAAQ+R,EAAIzO,MAAMxH,EACtBiW,EAAI3R,QACAJ,MAAO+R,EAAIzO,MAAMC,EACjBjD,MAAOyR,EAAIzO,MAAMhD,MACjBE,IAAKuR,EAAI3R,OAAOI,KAEpBtM,EACKuI,SAAS,KACTiD,gBAAgB,SAASvD,EAAGnF,GAC5B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAE7ByI,eAAe,SAAStD,EAAGnF,GACxB,MAAO+wB,GAAMvb,aAAarQ,EAAGnF,KAEhCsB,KAAKyZ,GACL7X,QAAO,GAGhB,QAASiuB,GAAgBpW,GACrB,GAAIgW,GAA
wC,IAAhCzvB,EAAKyZ,EAAI0G,aAAapH,MAAc2W,EAASC,CACzDlW,GAAIzO,MAAQ,EAAK8kB,EAAOtsB,IAAIiW,EAAIzO,OAChCyO,EAAIzO,MAAQ,EAAK8kB,EAAO7kB,IAAIwO,EAAIzO,OAChCpP,EACKuI,SAAS,GACTiD,gBAAgB,SAASvD,EAAGnF,GAC5B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAE7ByI,eAAe,SAAStD,EAAGnF,GACxB,MAAO+wB,GAAMvb,aAAarQ,EAAGnF,KAEhCsB,KAAKyZ,GACL7X,QAAO,GAGhB,QAASmuB,GAActW,GACnB,GAAIgW,GAAwC,IAAhCzvB,EAAKyZ,EAAIzZ,KAAK8H,QAAQiR,MAAc2W,EAASC,CAEzDlW,GAAI/R,MAAQsoB,EAAMxsB,IAAIiW,EAAIzZ,MAC1ByZ,EAAW,QACP/R,MAAOsoB,EAAM/kB,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,MACXE,IAAKuR,EAAIzZ,KAAKkI,KAElBtM,EACKuI,SAAS,GACTiD,gBAAgB,SAASvD,EAAGnF,GAC5B,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAE7ByI,eAAe,SAAStD,EAAGnF,GACxB,MAAO+wB,GAAMvb,aAAarQ,EAAGnF,KAEhCsB,KAAKyZ,GACL7X,QAAO,GAKhB,QAASoc,KACP,IAAI,GAAItf,GAAE,EAAG8c,EAAGzf,EAAO+B,OAAY0d,EAAJ9c,EAAQA,IAAC,CACtC,GAAIF,GAAQzC,EAAO2C,EACnB,KACEF,EAAMwf,kBACN,MAAM1gB,MAIZ,QAASugB,GAAeoS,EAAYnS,EAAY/D,GAC9C,IAAI,GAAIrb,GAAE,EAAG8c,EAAGzf,EAAO+B,OAAY0d,EAAJ9c,EAAQA,IAAC,CACtC,GAAIF,GAAQzC,EAAO2C,EACnB,KACEF,EAAMqf,eAAeoS,EAAYnS,EAAY/D,GAC7C,MAAMzc,MAxUZ,GAAIsF,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,GAEjBpE,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa/M,KAAK0B,IACxDA,EAAMoE,UAAYnG,IAElB,IAAI4E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,GAE9DivB,EAAalwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,QAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,QACnEoX,EAAanwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,QAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,QACnEqX,EAAgBpwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,WAAVA,EAAEzC,MAAgC,GAAXyC,EAAEkV,QACzEsX,EAAgBrwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,WAAVA,EAAEzC,MAAgC,GAAXyC,EAAEkV,QACzEuX,EAAatwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,OAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,QACnEwX,EAAavwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,OAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,QACnEyX,EAAaxwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,QAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,QACnE0X,EAAazwB,EAAKmI,OAAO,SAAStE,GAAI,MAAiB,QAAVA,EAAEzC,MAA6B,GAAXyC,EAAEkV,OAGvE,MAAK/Y,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAGtC,IAAIooB,GAAUnsB,EAAKmI,OAAO,SAAStE,GAAI,OAAQA,EAAE0V,UAAuB,GAAX1V,EAAEkV,QAC1D5I,IAAI,SAAStM,GACV,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,GAAIoH,EAAGsS,EAAK1Z,QAIrCuoB,EAAUpsB,EAAKmI,OAAO,SAAStE,GAAI,OAAQA,EAAE0V,UAAuB,GAAX1V,EAAEkV,QAC1D5I,IAAI,SAAStM,GACV,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,GAAIoH,EAAGsS,EAAK1Z,OAIzCL,GAAKnB,OAAOnG,GAAGkf,OAAOlf,GAAGmf,MAAM8Q,EAAQ/uB,OAAOgvB,IAAW,SAASvoB,GAAK,MAAOA,GAAEL,KAC3ErB,OAAO,EAAGd,GAEf,IAAI2B,GAAOJ,EAAUK,UAAS,qBAAsBjD,MAAMA,IACtDkZ,GAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,wBAAwBD,OAAM,IAElF8V,IAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,cACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,cACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,cACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,cACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,cACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAEjC,IAAIwQ,IAAI7Q,EAAKH,OAAM,KAEf6tB,GAAc1wB,EAAKmQ,IAAI,SAAStM,EAAEnF,GAClC,MAAOsB,GAAKtB,GAAGsJ,OAASA,EAAMnE,EAAGnF,IAIrC,IAAK6gB,EAEE,CACH,GAAI6H,IAAc5H,EAAOkH,QAAUrlB,EAAiB,EAAIA,EACpDirB,GAAkB9M,EAAOkH,QAAUU,GAAc,CAErD5H,GAAO1c,MAAMskB,IACb5H,EAAOxX,MAAM0oB,IAEb7c,GAAEhR,OAAM,eACHyW,MAAMtZ,EAAKmQ,IAAI,SAASrI,GAGrB,MAFAA,GAAOykB,YAAqChtB,SAAvBuI,EAAOykB,YAA4BzkB,EAAOI,IAAMJ,EAAOykB,YAC5EzkB,EAAOI,IAAMJ,EAAOykB,
aAA+B,GAAhBzkB,EAAOiR,MAAa,GAAKyT,GACrD1kB,KAEVhL,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE4S,GAAEhR,OAAM,eACHQ,KAAI,YAAc,aAAeipB,GAAkB,KAAQrrB,EAAOE,IAAK,SAtB5E0S,IAAEhR,OAAM,eAAgBI,UAAS,KAAMc,QAyB3C4sB,GACK7tB,MAAMzB,GACN0B,OAAOzB,GACPqiB,YAAYA,GACZ3b,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,QAAhB/Y,EAAKtB,GAAG0C,QACxG4qB,EACKlpB,MAAMzB,GACN0B,OAAOzB,GACPqiB,YAAYA,GACZ3b,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,QAAhB/Y,EAAKtB,GAAG0C,QACxGwvB,EACK9tB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,WAAhB/Y,EAAKtB,GAAG0C,QACxGyvB,EACK/tB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,WAAhB/Y,EAAKtB,GAAG0C,QACxG4uB,EACKltB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,OAAhB/Y,EAAKtB,GAAG0C,QACxG6qB,EACKnpB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,OAAhB/Y,EAAKtB,GAAG0C,QACxG0uB,EACKhtB,MAAMzB,GACN0B,OAAOzB,GACPqiB,YAAYA,GACZ3b,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,QAAhB/Y,EAAKtB,GAAG0C,QACxG0vB,EACKhuB,MAAMzB,GACN0B,OAAOzB,GACPqiB,YAAYA,GACZ3b,MAAM0oB,GAAYvoB,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,UAA6B,GAAjBvZ,EAAKtB,GAAGqa,OAA8B,QAAhB/Y,EAAKtB,GAAG0C,QAExGyS,GAAExQ,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAEpE,IAAI4vB,IAAald,GAAEhR,OAAM,eACpByW,MAAM4W,EAAW/nB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAC/CyX,GAAgBnd,GAAEhR,OAAM,kBACvByW,MAAM8W,EAAcjoB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAClD0X,GAAYpd,GAAEhR,OAAM,cACnByW,MAAMgX,EAAUnoB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAC9C2X,GAAard,GAAEhR,OAAM,eACpByW,MAAMkX,EAAWroB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAC/CoT,GAAa9Y,GAAEhR,OAAM,eACpByW,MAAM6W,EAAWhoB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAC/C4X,GAAgBtd,GAAEhR,OAAM,kBACvByW,MAAM+W,EAAcloB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAClDmT,GAAY7Y,GAAEhR,OAAM,cACnByW,MAAMiX,EAAUpoB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAC9C6X,GAAavd,GAAEhR,OAAM,eACpByW,MAAMmX,EAAWtoB,OAAO,SAAStE,GAAG,OAAQA,EAAE0V,YAE/C8X,GAAcb,EAAW1yB,OAAS0yB,EAAWrgB,IAAI,SAAS3E,GAAG,MAAOA,GAAEjH,SAASgjB,OAAO,SAAS/b,EAAEuO,GACjG,MAAOvO,GAAE2E,IAAI,SAASmhB,EAAK5yB,GAAG,OAAQ8E,EAAG8tB,EAAK9tB,EAAGyH,EAAGqmB,EAAKrmB,EAAI8O,EAAErb,GAAGuM,OACnE7N,SAASoG,EAAE,EAAGyH,EAAE,QACfsmB,GAAcd,EAAW3yB,OAAS2yB,EAAWtgB,IAAI,SAAS3E,GAAG,MAAOA,GAAEjH,SAASgjB,OAAO,SAAS/b,EAAEuO,GACjG,MAAOvO,GAAE2E,IAAI,SAASmhB,EAAK5yB,GAAG,OAAQ8E,EAAG8tB,EAAK9tB,EAAGyH,EAAGqmB,EAAKrmB,EAAI8O,EAAErb,GAAGuM,OACnE7N,SAASoG,EAAE,EAAGyH,EAAE,OAEnBumB,GAASnvB,OAAOovB,GAAYv1B,GAAGkf,OAAOlf,GAAGmf,MAAM8Q,GAAS/uB,OAAOi0B,IAAc,SAASxtB,GAAK,MAAOA,GAAEoH,KAC/F9I,OAAO,EAAGb,IAEfowB,EAASrvB,OAAOsvB,GAAYz1B,GAAGkf,OAAOlf,GAAGmf,MAAM+Q,GAAShvB,OAAOm0B,IAAc,SAAS1tB,GAAK,MAAOA,GAAEoH,KAC/F9I,OAAO,EAAGb,IAEfqvB,EAAO7a,QAAQ0b,EAAQnvB,UACvBuuB,EAAU9a,QAAQ0b,EAAQnvB,UAC1B2tB,EAAMla,QAAQ0b,EAAQnvB,UACtBytB,EAAOha,QAAQ0b,EAAQnvB,UAEvB2pB,EAAOlW,QAAQ4b,EAAQrvB,UACvBwuB,EAAU/a,QAAQ4b,EAAQrvB,UAC1B4pB,EAAMnW,QAAQ4b,EAAQrvB,UACtByuB,EAAOhb,QAAQ4b,EAAQrvB,UAEpBmuB,EAAW1yB,QAAQ5B,GAAG2N,WAAWqnB,IAAYp0B,KAAKgzB,GAClDW,EAAW3yB,QAAQ5B,GAAG2N,WAAWunB,IAAYt0B,KAAKg0B,GAElDR,EAAUxyB,QAAQ5B,GAAG2N,WAAWonB,IAAWn0B,KAAKkzB,GAChDO,EAAUzyB,QAAQ5B,GAAG2N,WAAW6iB,IAAW5vB,KAAKmvB,GAEhDiE,EAAWpyB,QAAQ5B
,GAAG2N,WAAWknB,IAAYj0B,KAAK6zB;A5BxOjE,A4ByOeR,E5BzOb,A4ByOwBryB,C5BzOvB,GAAG,CAAC,CAAC,CAAC,C4ByOyB5B,GAAG2N,C5BzOvB,CAAC,CAAC,EAAE,GAAG,CAAC,E4ByO0B8iB,G5BzOrB,CAAC,A4ByOgC7vB,I5BzO5B,CAAC,A4ByOgCkvB,E5BzO9B,CAAC,A4B2OrBoE,EAActyB,Q5B3OiB,A4B2OT5B,GAAG2N,WAAWmnB,IAAel0B,KAAK8zB,GACxDP,EAAcvyB,QAAQ5B,GAAG2N,WAAWsnB,IAAer0B,KAAK+zB,GAE3DhY,EACK4H,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAUlY,EAAiB,GAEhCuS,GAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB/B,EAAkB,KAC1DpF,GAAG2N,WAAWgK,GAAEhR,OAAM,kBACjB/F,KAAK+b,GAEV8W,EACKlP,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAGhCnF,GAAG2N,WAAWgK,GAAEhR,OAAM,mBACjB/F,KAAK6yB,GAEVD,EACKjP,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCnF,GAAG2N,WAAWgK,GAAEhR,OAAM,mBACjB/F,KAAK4yB,GAEV7b,GAAEhR,OAAM,kBACHuD,QAAO,cAAgB+lB,EAAQruB,QAAS,GAAQ,GAChDuF,KAAI,YAAc,aAAeG,EAAErB,QAAQ,GAAK,OAErD0R,GAAEhR,OAAM,kBACHuD,QAAO,cAAgBgmB,EAAQtuB,QAAS,GAAQ,GAChDuF,KAAI,YAAc,aAAeG,EAAErB,QAAQ,GAAK,OAErDqd,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvCliB,EAAMqR,WAGPkQ,IACCC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAKD,EAAOC,KAAMC,IAAIF,EAAOE,MACrCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAwGrCD,GACCC,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtD0gB,GACA,IAAI2C,GAAa7C,EAAY8C,EAAgBC,IAC7C7gB,GACCmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAElB5T,QAAQ,SAASmC,EAAOpJ,GACrB,GAAI0c,GAAS5X,EAAEnB,SACX6nB,EAAgBpiB,EAAOvD,OAAO4D,OAAO,SAAStE,EAAEnF,GAChD,MAAOF,GAAMgF,IAAIK,EAAEnF,IAAM0c,EAAO,IAAM5c,EAAMgF,IAAIK,EAAEnF,IAAM0c,EAAO,IAGnE0C,GAAapiB,EAAG4I,kBAAkB4lB,EAAe5sB,EAAE0E,YAAaxD,EAAMgF,IACtE,IAAIwH,GAAQkf,EAAcpM,GACtBqM,EAAc3rB,EAAMyM,IAAID,EAAO8S,EACf,QAAhBqM,GACAtM,EAAenf,EAAGof,GAAY,GAEpBve,SAAVyL,IACgBzL,SAAhBohB,IAA2BA,EAAc3V,GACtBzL,SAAnBqhB,IAA8BA,EAAiBpd,EAAEhF,EAAMgF,IAAIwH,EAAM8S,KACrE+C,EAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOyiB,EACPniB,MAAOA,EAAMF,EAAOA,EAAOqY,aAC3BngB,KAAMgL,EACN+N,MAAuB,GAAhBjR,EAAOiR,MAAa2W,EAASC,MAI5C,IAAIvF,GAAwB,SAASvmB,EAAEnF,GACnC,GAAIqa,GAAQ8H,EAAQniB,GAAGqa,KACvB,OAAY,OAALlV,EAAY,MAAQkV,EAAM7E,aAAarQ,GAGlDmc,GAAiBpkB,QACZwL,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAEhCyI,eAAe6Y,EAAiBpkB,QAAQuL,kBAAoBijB,GAC5DpqB,MACG0H,MAAOlJ,EAAMgF,IAAKmd,EAAY7C,GAC9BhZ,MAAOgZ,EACPhW,OAAQ+Y,MAGhBb,EAAiBre,gBAAgBif,KAGrCZ,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpD0gB,QAGJ2S,EAAOv0B,SAASiB,GAAE,2BAA6BmyB,GAC/CxD,EAAO5vB,SAASiB,GAAE,2BAA6BmyB,GAC/CmB,EAAOv0B,SAASiB,GAAE,0BAA4B,SAASoc,GACnD7d,EAAQgG,QAAO,KAEnBoqB,EAAO5vB,SAASiB,GAAE,0BAA4B,SAASoc,GACnD7d,EAAQgG,QAAO,KAGnBgvB,EAAUx0B,SAASiB,GAAE,2BAA6BuyB,GAClDiB,EAAUz0B,SAASiB,GAAE,2BAA6BuyB,GAClDgB,EAAUx0B,SAASiB,GAAE,0BAA4B,SAASoc,GACtD7d,EAAQgG,QAAO,KAEnBivB,EAAUz0B,SAASiB,GAAE,0BAA4B,SAASoc,GACtD7d,EAAQgG,QAAO,KAGnBkuB,EAAO1zB,SAASiB,GAAE,2BAA6BwyB,GAC/CiB,EAAO10B,SAASiB,GAAE,2BAA6BwyB,GAC/CC,EAAO1zB,SAASiB,GAAE,0BAA4B,SAASoc,GACnD7d,EAAQgG,QAAO,KAEnBkvB,EAAO10B,SAASiB,GAAE,0BAA4B,SAASoc,GACnD7d,EAAQgG,QAAO,KAGnBouB,EAAM5zB,SAASiB,GAAE,2BAA6B0yB,GAC9C9D,EAAM7vB,SAASiB,GAAE,2BAA6B0yB,GAE9CC,EAAM5zB,SAASiB,GAAE,0BAA4B,SAASoc,GAClD7d,EAAQgG,QAAO,KAEnBqqB,EAAM7vB,SAASiB,GAAE,0BAA4B,SAASoc,GAClD7d,EAAQgG,QAAO,KAEnBouB,EAAM5zB,SAASiB,GAAE,2BAA6B,SAASoc,GACnD7d,MAEJqwB,EAAM7vB,SAASiB,GAAE,2BAA6B,SAASoc,GACnD7d,SAKL4C,EApeX,GAOIizB,GACAE,EARA1wB,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAChDue,EAAY,KACZzX,EAAQtM,EAAGG,MAAMuQ,eACjBtJ,EAAQ,KACRC,EAAS,KACTwc,GAAa,EACb9M,EAAS,KAGTkD,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9B0Y,EAAc,SACdrC,GAAa,EACbtB,EAAmBtkB,EAAGkE,uBACtBmgB,GAA0B,EAC1ByM,EAAsB,gBACtBroB,EAAW,IAOX
X,EAAItH,GAAG8H,MAAMC,SACbutB,EAAUt1B,GAAG8H,MAAMC,SACnBytB,EAAUx1B,GAAG8H,MAAMC,SAEnB0sB,EAASj1B,EAAGI,OAAO4H,OAAOkT,OAAO4a,GAASrtB,SAASA,GACnD6nB,EAAStwB,EAAGI,OAAO4H,OAAOkT,OAAO8a,GAASvtB,SAASA,GAEnDysB,EAAYl1B,EAAGI,OAAOgtB,UAAUlS,OAAO4a,GAASrtB,SAASA,GACzD0sB,EAAYn1B,EAAGI,OAAOgtB,UAAUlS,OAAO8a,GAASvtB,SAASA,GAEzD6rB,EAAQt0B,EAAGI,OAAOmxB,WAAWG,SAAQ,GAAOxW,OAAO4a,GAASrtB,SAASA,GACrE8nB,EAAQvwB,EAAGI,OAAOmxB,WAAWG,SAAQ,GAAOxW,OAAO8a,GAASvtB,SAASA,GAErE2rB,EAASp0B,EAAGI,OAAO81B,cAAchb,OAAO4a,GAASrtB,SAASA,GAC1D2sB,EAASp1B,EAAGI,OAAO81B,cAAchb,OAAO8a,GAASvtB,SAASA,GAE1D0U,EAAQnd,EAAGI,OAAO8X,OAAO5P,MAAMR,GAAGuQ,OAAM,UAAWW,YAAY,GAAGvQ,SAASA,GAC3EwrB,EAASj0B,EAAGI,OAAO8X,OAAO5P,MAAMwtB,GAASzd,OAAM,QAAS5P,SAASA,GACjEurB,EAASh0B,EAAGI,OAAO8X,OAAO5P,MAAM0tB,GAAS3d,OAAM,SAAU5P,SAASA,GAElEqb,EAAS9jB,EAAGI,OAAO0jB,SAASzc,OAAO,IACnCnH,EAAUF,EAAGI,OAAOF,UACpBQ,EAAWF,GAAGE,WAEdL,GAAU40B,EAAQ3E,EAAQ4E,EAAWC,EAAWb,EAAO/D,EAAO6D,EAAQgB,EA8hB1E,OAlGAtyB,GAAMpC,SAAWA,EACjBoC,EAAMghB,OAASA,EACfhhB,EAAMmyB,OAASA,EACfnyB,EAAMwtB,OAASA,EACfxtB,EAAMoyB,UAAYA,EAClBpyB,EAAMqyB,UAAYA,EAClBryB,EAAMwxB,MAAQA,EACdxxB,EAAMytB,MAAQA,EACdztB,EAAMsxB,OAASA,EACftxB,EAAMsyB,OAASA,EACftyB,EAAMqa,MAAQA,EACdra,EAAMmxB,OAASA,EACfnxB,EAAMkxB,OAASA,EACflxB,EAAM5C,QAAUA,EAChB4C,EAAMwhB,iBAAmBA,EAEzBxhB,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9EotB,UAAgB9mB,IAAK,WAAW,MAAO8mB,IAAY7mB,IAAK,SAASvG,GAAGotB,EAASptB,IAC7EstB,UAAchnB,IAAK,WAAW,MAAOgnB,IAAY/mB,IAAK,SAASvG,GAAGstB,EAASttB,IAC3EoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrEsf,aAAiBhZ,IAAK,WAAW,MAAOgZ,IAAe/Y,IAAK,SAASvG,GAAGsf,EAAYtf,IACpFmoB,qBAAyB7hB,IAAK,WAAW,MAAO6hB,IAAuB5hB,IAAK,SAASvG,GAAGmoB,EAAoBnoB,IAG5GpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9Bb,GAAImH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAC7CsR,EAAOtR,EACPssB,EAAOntB,EAAEa,GACT2nB,EAAOxoB,EAAEa,GACTusB,EAAUptB,EAAEa,GACZwsB,EAAUrtB,EAAEa,GACZ2rB,EAAMxsB,EAAEa,GACR4nB,EAAMzoB,EAAEa,GACRyrB,EAAOtsB,EAAEa,GACTysB,EAAOttB,EAAEa,KAEb4G,GAAIN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAC7CkZ,EAAOlZ,EACPssB,EAAO1lB,EAAE5G,GACT2nB,EAAO/gB,EAAE5G,GACTusB,EAAU3lB,EAAE5G,GACZwsB,EAAU5lB,EAAE5G,GACZyrB,EAAO7kB,EAAE5G,GACTysB,EAAO7lB,EAAE5G,GACT2rB,EAAM/kB,EAAE5G,GACR4nB,EAAMhhB,EAAE5G,KAEZid,YAAa3W,IAAK,WAAW,MAAO2W,IAAc1W,IAAK,SAASvG,GAC5Did,EAAWjd,EACXssB,EAAOrP,WAAWjd,GAClB2nB,EAAO1K,WAAWjd,GAClByrB,EAAOxO,WAAWjd,GAClBysB,EAAOxP,WAAWjd,KAGtB0b,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,EAA0B1b,EACtB0b,IACA4Q,EAAO/S,aAAY,GACnB+S,EAAOrP,YAAW,GAClB0K,EAAOpO,aAAY,GACnBoO,EAAO1K,YAAW,GAClBwO,EAAOlS,aAAY,GACnBkS,EAAOxO,YAAW,GAClBwP,EAAOlT,aAAY,GACnBkT,EAAOxP,YAAW,GAClBsP,EAAUhT,aAAY,GACtBiT,EAAUjT,aAAY,MAI9BzZ,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,GACVssB,EAAQ3E,EAAQ8D,EAAQgB,EAAQF,EAAWC,EAAWhY,EAAO8W,EAAQD,GAAQ/pB,QAAQ,SAAS6I,GAC7FA,EAAMrK,SAASA,SAKzBzI,EAAGG,MAAMqP,YAAY1M,GAEdA,GCnlBX9C,EAAGI,OAAO2sB,QAAU,WAChB,YAoCA,SAASjqB,GAAMsB,GAiIX,MAhIAA,GAAUC,KAAK,SAASC,GACpB4C,EAAY1G,GAAG2G,OAAOpG,KACtB,IAAI4E,GAAiB3F,EA
AGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAElEvF,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAI2R,GAAKlT,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAU,EAGnD0F,GAAEnB,OAAOqT,GAAWxZ,GAAGkf,OAAOpb,EAAK,GAAGuE,OAAO4L,IAAIwF,GAAMvY,OAAOke,KAE1DwB,EACAtZ,EAAErB,MAAMyT,IAA4B,GAAjBvU,EAAsBrB,EAAK,GAAGuE,OAAOzG,OAAQuD,GAAkBrB,EAAK,GAAGuE,OAAOzG,OAAS,IAAOkC,EAAK,GAAGuE,OAAOzG,SAEhI0F,EAAErB,MAAMyT,IAAW,EAAIrB,EAAE,EAAGlT,EAAiBkT,EAAE,EAAI,IAEvDtJ,EAAE5I,OAAOyT,IACD5Z,GAAGgJ,IAAIlF,EAAK,GAAGuE,OAAO4L,IAAI4M,GAAQ3f,OAAO4f,IACzC9gB,GAAG6I,IAAI/E,EAAK,GAAGuE,OAAO4L,IAAI8M,GAAS7f,OAAO4f,MAEhD7a,MAAM0U,IAAWvV,EAAiB,IAGhCkC,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,KAC7BmB,EAAEnB,SAAS,GACPmB,EAAEnB,QAAQmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,GAAWmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,KACzEmB,EAAEnB,QAAM,GAAK,KAEnB4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,KAC7B4I,EAAE5I,SAAS,GACP4I,EAAE5I,QAAQ4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,GAAW4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,KACzE4I,EAAE5I,QAAM,GAAK,IAGvB,IAAIW,GAAO9G,GAAG2G,OAAOpG,MAAMwG,UAAS,wBAAyBjD,MAAMA,EAAK,GAAGuE,SACvErB,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,2BACnD8V,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,YAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEyB,EACKvF,GAAE,QAAU,SAASwG,EAAEnF,GACpBtC,EAAS8gB,YACLld,KAAM6D,EACNiB,MAAOpG,EACPiK,IAAKzM,GAAGuE,MACRwF,GAAIA,MAIhBkT,EAAU/V,OAAM,YACXC,KAAI,KAAO,sBAAwB4C,GACnC7C,OAAM,QAEXJ,EAAKH,OAAM,uBAA0BoD,EAAK,SACrC5C,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpBuS,EAAKxQ,KAAI,YAAc8Z,EAAW,2BAA6BlX,EAAK,IAAM,GAE1E,IAAI6N,GAAQ9Q,EAAKH,OAAM,aAAcI,UAAS,YACzCjD,KAAK,SAAS6D,GAAK,MAAOA,IAC/BiQ,GAAMhQ,OAAOC,SAEb+P,EAAM3Q,QAAQC,OAAM,QACfC,KAAI,QAAU,SAASQ,EAAEnF,EAAEwY,GAAK,OAAQmG,EAAQxZ,EAAEnF,GAAK4e,EAASzZ,EAAEnF,GAAK,mBAAqB,oBAAsB,YAAcwY,EAAI,IAAMxY,IAC1I2E,KAAI,IAAM,SAASQ,EAAEnF,GAClB,MAAO,WACAuM,EAAEoS,EAAQxZ,EAAEnF,IACTuM,EAAEgS,EAAQpZ,EAAEnF,KAChB,KACE6V,EAAE,EACJ,MACCA,EAAE,EACH,SACCtJ,EAAE8R,EAAOlZ,EAAEnF,IAAMuM,EAAEoS,EAAQxZ,EAAEnF,KAC9B,OACCuM,EAAEqS,EAASzZ,EAAEnF,IACVuM,EAAE8R,EAAOlZ,EAAEnF,KACf,IACC6V,EAAE,EACH,OACEA,EAAE,EACJ,QAETlR,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAEnF,IAAM,IAAMuM,EAAEgS,EAAQpZ,EAAEnF,IAAM,MAC/F2E,KAAI,OAAS,SAASQ,EAAEnF,GAAK,MAAOsJ,GAAM,KAC1C3E,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAOsJ,GAAM,KAC5C3E,KAAI,IAAM,GACVA,KAAI,IAAM,SAASQ,EAAEnF,GAAM,MAAOuM,GAAE7F,KAAKL,IAAI,EAAGwY,EAAK1Z,EAAEnF,OACvD2E,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAO0G,MAAKC,IAAI4F,EAAEsS,EAAK1Z,EAAEnF,IAAMuM,EAAE,MAGrE6I,EAAMzQ,KAAI,QAAU,SAASQ,EAAEnF,EAAEwY,GAC7B,OAAQmG,EAAQxZ,EAAEnF,GAAK4e,EAASzZ,EAAEnF,GAAK,mBAAqB,oBAAsB,YAAcwY,EAAI,IAAMxY,IAG9GxC,GAAG2N,WAAWiK,GACTzQ,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,aAAe8E,EAAEmS,EAAK9R,EAAEnF,IAAM,IAAMuM,EAAEgS,EAAQpZ,EAAEnF,IAAM,MAC/F2E,KAAI,IAAM,SAASQ,EAAEnF,GAClB,GAAI6V,GAAKlT,EAAiBrB,EAAK,GAAGuE,OAAOzG,OAAU,EACnD,OAAO,WACAmN,EAAEoS,EAAQxZ,EAAEnF,IACTuM,EAAEgS,EAAQpZ,EAAEnF,KAChB,KACE6V,EAAE,EACJ,MACCA,EAAE,EACH,SACCtJ,EAAE8R,EAAOlZ,EAAEnF,IACRuM,EAAEoS,EAAQxZ,EAAEnF,KAChB,OACCuM,EAAEqS,EAASzZ,EAAEnF,IACVuM,EAAE8R,EAAOlZ,EAAEnF,KACf,IACC6V,EAAE,EACH,OACEA,EAAE,EACJ,UAIX/V,EA/JX,GAmBMkX,GACAI,EACAF,EACAiB,EAtBF5V,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,KACRC,EAAS,KACTkD,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZY,EAAItH,GAAG8H,MAAMC,SACbgH,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BoS,EAAU,SAASxZ,GAAK,MAAOA,GAAE2Z,MACjCF,EAAW,SAASzZ,GAAK,MAAOA,GAAE4Z,OAClCR,EAAU,SAASpZ,GAAK,MAAOA,GAAE6Z,MACjCX,EAAS,SAASlZ,GAAK,MAAOA,GAAE8Z,
KAChCrC,KACA0B,KACAF,GAAc,EACdK,GAAW,EACXnV,EAAQtM,EAAGG,MAAMuQ,eACjBwR,GAAc,EAKdxhB,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,YAAa,aAAc,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAsMhK,OAzDAoC,GAAMqf,eAAiB,SAASC,EAAYC,GACxCvf,EAAMwf,kBACNpb,EAAUC,OAAM,0BAA6Bib,GACxC1X,QAAO,QAAU2X,IAI1Bvf,EAAMwf,gBAAkB,WACpBpb,EAAUC,OAAM,8BACXuD,QAAO,SAAU,IAQ1B5H,EAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAW6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAClEtB,QAAW4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACpEvC,QAAW6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IAC1DuS,QAAWjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IAC1DqR,SAAW/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IACtEyR,SAAWnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IACtEuR,QAAWjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACpEwS,QAAWlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACpEiX,QAAW3Q,IAAK,WAAW,MAAO2Q,IAAU1Q,IAAK,SAASvG,GAAGiX,EAAOjX,IACpE2Y,QAAWrS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACpEyY,SAAWnS,IAAK,WAAW,MAAOmS,IAAWlS,IAAK,SAASvG,GAAGyY,EAAQzY,IACtE8Y,UAAWxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IACxE4B,IAAW0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC5DuZ,aAAcjT,IAAK,WAAW,MAAOiT,IAAehT,IAAK,SAASvG,GAAGuZ,EAAYvZ,IAEjFb,GAAQmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAC7D4G,GAAQN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKlZ,IAC7DmZ,MAAQ7S,IAAK,WAAW,MAAO0S,MAAazS,IAAK,SAASvG,GAAGgZ,EAAQhZ,IACrEoZ,OAAQ9S,IAAK,WAAW,MAAO2S,MAAc1S,IAAK,SAASvG,GAAGiZ,EAASjZ,IACvEqZ,MAAQ/S,IAAK,WAAW,MAAOsS,IAAWrS,IAAK,SAASvG,GAAG4Y,EAAQ5Y,IACnEsZ,KAAQhT,IAAK,WAAW,MAAOoS,IAAUnS,IAAK,SAASvG,GAAG0Y,EAAO1Y,IAGjEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAqB5B,QAAZ8E,EAAElD,IAAsBkD,EAAElD,IAASF,EAAOE,IAC1DF,EAAOuR,MAAqBjT,QAAZ8E,EAAEmO,MAAsBnO,EAAEmO,MAASvR,EAAOuR,MAC1DvR,EAAOsR,OAAqBhT,QAAZ8E,EAAEkO,OAAsBlO,EAAEkO,OAAStR,EAAOsR,OAC1DtR,EAAOC,KAAqB3B,QAAZ8E,EAAEnD,KAAsBmD,EAAEnD,KAASD,EAAOC,OAE9D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC3I,EAAGG,MAAMqP,YAAY1M,GACdA,GCnOX9C,EAAGI,OAAO+1B,oBAAsB,WAC5B,YAsCA,SAASrzB,GAAMsB,GAgYX,MA/XAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GA+NpB,QAASkvB,GAAKrrB,GACV,MAAOH,GAAKouB,EAAkB3hB,IAAI,SAAUtI,GAExC,GAAIgE,MAAMhI,EAAEU,OAAOsD,EAAEK,OAAS2D,MAAMuH,WAAWvP,EAAEU,OAAOsD,EAAEK,QAAU6pB,EAA0B,CAC1F,GAAI1vB,GAAS4I,EAAEpD,EAAEK,KAAK7F,SAClBF,EAAQ8I,EAAEpD,EAAEK,KAAK/F,QACjB+C,EAAM7C,EAAO,IAAMA,EAAO,GAAKA,EAAO,IAAM,CAGhD,IAAI2vB,EAAwBnjB,QAAQhH,EAAEK,KAAO,EAAG,CAE5C,GAAI+pB,GAAW/1B,GAAG8H,MAAMC,SAAS5B,QAAQ6C,EAAK7C,EAAO,KAAKF,OAAOb,EAAkB,GAAIa,EAAM,IAC7F8I,GAAEpD,EAAEK,KAAK4a,MAAM7X,EAAEgnB,GACjBD,EAAwB7yB,KAAK0I,EAAEK,KAEnC,GAAI2D,MAAMhI,EAAEU,OAAOsD,EAAEK,OAAS2D,MAAMuH,WAAWvP,EAAEU,OAAOsD,EAAEK,OACtD,OAAQ1E,EAAEqE,EAAEK,KAAM+C,EAAEpD,EAAEK,KAAKhD,IAcnC,MAT0B3F,UAAtB2yB,IACIF,EAAwBl0B,OAAS,GAAKi0B,GACtCG,EAAkB/rB,MAAK,UAAY,UACnCgsB,EAAsBhsB,MAAK,UAAY,YAEvC+rB,EAAkB/rB,MAAK,UAAY,QACnCgsB,EAAsBhsB,MAAK,UAAY,WAGvC3C,EAAEqE,EAAEK,KAAM+C,EAAEpD,EAAEK,KAAKrE,EAAEU,OAAOsD,EAAEK,UAI9C,QAASkqB,GAAaC,GAClBC,EAAQ3sB,QAAQ,SAAUyR,GAEtB,GAAImb,GAActnB,EAAEmM,EAAEob,WAAW1P,MAAM7X,IAAI5I,QACvC+U,GAAEqb,aACFrb,EAAEgE,OAAO,IAAMnQ,EAAEmM,EAAEob,WAAWnwB,SAAS,GAAKkwB,EAAY,KAAOnb,EAAEgE,OAAO,GAAKhE,EAAEgE,OAAO,KAAOsX,EAAkBtb,EAAEob,WAAapb,EAAEgE,OAAO,IAAMmX,EAAY,IAEzJnb,EAAEub,SACFvb,EAAEgE,OAAO,GAAKmX,EAAY,IAE1BF,GACApnB,EAAEmM,EAAEob,WAAW1P,MAAM1H,OAAOhE,EAAEgE,UAGtCwX,EAAW/vB,OAAM,uBACZ9C,KAAK,SAAU8D,GACZ3H,GAAG2G,OAAOpG,MAAMK,KAAKmO,EAAEpH,EAAEqE,KAAK4a,SAGj
C7f,UAAS,QACTI,KAAI,IAAM,IACVA,KAAI,QAAU,IAEnBwvB,IAIJ,QAASC,KAEDC,KAAiB,IACjBA,GAAe,EACfX,GAAa,IAKrB,QAAStP,KACLkQ,EAAUC,EAAe9qB,OAAO,SAAUN,GAAK,OAAQoD,EAAEpD,GAAGib,MAAMC,UAClEmQ,EAAUF,EAAQ7iB,IAAI,SAAStI,GAAK,MAAOoD,GAAEpD,GAAGib,MAAM1H,WAEtDkX,KACAU,EAAQrtB,QAAQ,SAAS9B,EAAEnF,GACvB4zB,EAAQ5zB,IACJ8zB,UAAW3uB,EACXuX,OAAQ8X,EAAQx0B,GAChBi0B,QAAQ,EACRF,YAAY,KAIpBp0B,KACA80B,EAAWhtB,MAAK,UAAY,SAAStC,GACjC,GAAIuvB,GAAWJ,EAAQhkB,MAAM,SAASnH,EAAGnJ,GACrC,OAAKmN,MAAMhI,EAAEU,OAAOsD,KAAOgE,MAAMuH,WAAWvP,EAAEU,OAAOsD,OAASqrB,EAAQx0B,GAAG,IAAMuM,EAAEpD,GAAGib,MAAM7X,IAAI5I,SAAS,IAAW,EAC1G6wB,EAAQx0B,GAAG,IAAMmF,EAAEU,OAAOsD,IAAMhE,EAAEU,OAAOsD,IAAMqrB,EAAQx0B,GAAG,KAAQmN,MAAMuH,WAAWvP,EAAEU,OAAOsD,MAGxG,OADIurB,IAAU/0B,EAAOc,KAAK0E,GACnBuvB,EAAW,KAAO,SAG7BP,IAEAz2B,EAAS0mB,OACLwP,QAASA,EACTj0B,OAAQA,IAGhB,QAASg1B,KACL,GAAIC,GAAiBN,EAAQl1B,OAAS,GAAI,GAAO,CACjDw0B,GAAQ3sB,QAAQ,SAAUyR,GAClBA,EAAEgE,OAAO,KAAOnQ,EAAEmM,EAAEob,WAAW1P,MAAM7X,IAAI5I,SAAS,IAAM2vB,EAAwBnjB,QAAQuI,EAAEob,YAAc,IACxGpb,EAAEub,QAAS,GACXvb,EAAEgE,OAAO,GAAKnQ,EAAEmM,EAAEob,WAAWnwB,SAAS,KACtC+U,EAAEqb,YAAa,KAEvBr2B,EAASm3B,SAASl1B,EAAQi1B,GAE9B,QAAST,KACLD,EAAW/vB,OAAM,YACZ9C,KAAK,SAAU8D,EAAGnF,GACf,GAAI0Y,GAAIkb,EAAQnqB,OAAO,SAAUmf,GAAK,MAAOA,GAAEkL,WAAa3uB,EAAEqE,KAC9DsrB,GAAa3vB,EAAEqE,KAAO+C,EAAEpH,EAAEqE,KAAK7F,SAGf,GAAZ+U,EAAEtZ,QAAei1B,IAEjBS,EAAa3vB,EAAEqE,QACXkP,EAAE,GAAGgE,OAAO,GAAKnQ,EAAEpH,EAAEqE,KAAK7F,SAAS,KACnCmxB,EAAa3vB,EAAEqE,MAAQkP,EAAE,GAAGgE,OAAO,KACnChE,EAAE,GAAGgE,OAAO,IAAMnQ,EAAEpH,EAAEqE,KAAK7F,SAAS,IACpCmxB,EAAa3vB,EAAEqE,KAAK/I,KAAKiY,EAAE,GAAGgE,OAAO,KAG7Clf,GAAG2G,OAAOpG,MAAMK,KAAK8W,EAAK5P,MAAMiH,EAAEpH,EAAEqE,MAAMgM,WAAWrQ,EAAEwE,QAAQorB,WAAWD,EAAa3vB,EAAEqE,SAGrG,QAASiW,GAAUta,GACf6vB,EAAS7vB,EAAEqE,KAAOzL,KAAK0Q,WAAWwmB,WAAanwB,EAAEK,EAAEqE,KACnD0rB,EAAWvwB,KAAI,aAAe,UAElC,QAAS+a,GAASva,GACd6vB,EAAS7vB,EAAEqE,KAAO9C,KAAKF,IAAI7D,EAAgB+D,KAAKL,IAAI,EAAGtI,KAAK0Q,WAAWwmB,YAAcz3B,GAAGuE,MAAM+C,IAC9F2vB,EAAW9vB,KAAI,IAAM6rB,GACrB4C,EAAkBxgB,KAAK,SAAU9F,EAAGuO,GAAK,MAAO8Z,GAAkBroB,EAAEtD,KAAO2rB,EAAkB9Z,EAAE7R,OAC/F4pB,EAAkBnsB,QAAQ,SAAU9B,EAAGnF,GAAK,MAAOmF,GAAEiwB,gBAAkBp1B,IACvE8E,EAAEnB,OAAOyvB,EAAkB3hB,IAAI,SAAUtM,GAAK,MAAOA,GAAEqE,OACvD0qB,EAAWvvB,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAegwB,EAAkBhwB,EAAEqE,KAAO,MAEhG,QAASqW,GAAQ1a,EAAGnF,SACTjC,MAAK0Q,WAAWwmB,iBAChBD,GAAS7vB,EAAEqE,KAClBhM,GAAG2G,OAAOpG,KAAK0Q,YAAY9J,KAAI,YAAc,aAAeG,EAAEK,EAAEqE,KAAO,KACvEirB,EACG9vB,KAAI,IAAM6rB,GACb0E,EACGvwB,KAAI,IAAM6rB,GACV7rB,KAAI,aAAe,MAEtBjH,EAAS23B,gBAAgBjC,GAE7B,QAAS+B,GAAkBhwB,GACvB,GAAIe,GAAI8uB,EAAS7vB,EACjB,OAAY,OAALe,EAAYpB,EAAEK,GAAKe,EA1X9B,GAAIhC,GAAY1G,GAAG2G,OAAOpG,KAO1B,IANA4E,EAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,GAE9DvF,EAAGG,MAAMsW,QAAQvP,GAGMrD,SAAnBS,EAAK,GAAGuE,OAAsB,CAC9B,GAAIyvB,KACJh0B,GAAK2F,QAAQ,SAAU9B,GACf,GAAIowB,MACA/rB,EAAMuC,OAAOyZ,KAAKrgB,EACtBqE,GAAIvC,QAAQ,SAAU2hB,GAAe,SAANA,IAAc2M,EAAI3M,GAAKzjB,EAAEyjB,MACxD0M,EAAQ70B,MAAO+I,IAAKrE,EAAE7F,KAAMuG,OAAQ0vB,MAE5Cj0B,EAAOg0B,EAGX,GAAIE,GAAal0B,EAAKmQ,IAAI,SAAUtM,GAAI,MAAOA,GAAEU,QAC3B,KAAlBlG,EAAOP,SACPO,EAAS2B,GAGbizB,EAAiBkB,EAAc7iB,KAAK,SAAU9F,EAAGuO,GAAK,MAAOvO,GAAEsoB,gBAAkB/Z,EAAE+Z,kBAAoB3jB,IAAI,SAAUtM,GAAK,MAAOA,GAAEqE,MACnI4pB,EAAoBqC,EAAchsB,OAAO,SAAUtE,GAAK,OAAQA,EAAE0V,WAGlE/V,EAAE4wB,aAAa,EAAG/yB,GAAiB,GAAGgB,OAAOyvB,EAAkB3hB,IAAI,SAAUtM,GAAK,MAAOA,GAAEqE,MAI3F,IAAIwqB,MACAX,GAA2B,EAC3ByB,IAEJP,GAAettB,QAAQ,SAAS9B,GAC5B,GAAIuX,GAASlf,GAAGkf,OAAO8Y,EAAY,SAAUrsB,GAAK,OAAQA,EAAEhE,KACxDqB,EAAMkW,EAAO,GACbrW,EAAMqW,EAAO,GACbiZ,GAAsB,GAEtBxoB,MAAM3G,IAAQ2G,MAAM9G,MACpBsvB,GAAs
B,EACtBnvB,EAAM,EACNH,EAAM,GAGNG,IAAQH,IACRG,GAAY,EACZH,GAAY,EAEhB,IAAIqS,GAAIkb,EAAQnqB,OAAO,SAAUmf,GAAK,MAAOA,GAAEkL,WAAa3uB,GAC3C,KAAbuT,EAAEtZ,SAEEu2B,GACAnvB,EAAM+F,EAAEpH,GAAGxB,SAAS,GACpB0C,EAAMkG,EAAEpH,GAAGxB,SAAS,KAGd+U,EAAE,GAAGqb,YAAcM,GACzB7tB,EAAMA,EAAMkS,EAAE,GAAGgE,OAAO,GAAKhE,EAAE,GAAGgE,OAAO,GAAKlW,EAC9CH,EAAMA,EAAMqS,EAAE,GAAGgE,OAAO,GAAKhE,EAAE,GAAGgE,OAAO,GAAKrW,GAGzCqS,EAAE,GAAGub,SACV5tB,EAAMA,EAAMqS,EAAE,GAAGgE,OAAO,GAAKhE,EAAE,GAAGgE,OAAO,GAAKrW,EAC9C2tB,EAAkB7uB,GAAKoH,EAAEpH,GAAGxB,SAAS,GACrC0vB,GAA2B,IAKnC9mB,EAAEpH,GAAK3H,GAAG8H,MAAMC,SACX5B,QAAQ6C,EAAKH,IACb5C,OAAgC,IAAxBb,EAAkB,IAAW,IAE1C0wB,KACA/mB,EAAEpH,GAAGif,MAAQ5mB,GAAG0V,IAAIkR,QAAQ7X,EAAEA,EAAEpH,IAAIxG,GAAE,aAAey1B,GAAYz1B,GAAE,QAAUylB,GAAOzlB,GAAE,WAAag2B,IAIvG,IAAIrwB,GAAOJ,EAAUK,UAAS,oCAAqCjD,MAAMA,IACrEkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,uCACnD6V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,qCACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,qCACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,4CAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEuC,EAAKigB,YAAW,YAAa2Q,QAAQC,GACrC3gB,EAAKG,OAAM,OACX,IAMIme,GAAmBC,EANnBqC,EAAWt4B,GAAG6iB,SAASC,OACd3hB,GAAE,YAAc8gB,GAChB9gB,GAAE,OAAS+gB,GACX/gB,GAAE,UAAYkhB,GAIvBngB,EAAOoF,EAAErB,QAAQ,GAAKqB,EAAErB,QAAQ,EAEpC,IADA/D,EAAOyN,MAAMzN,GAAQoF,EAAErB,QAAQ,GAAK/D,GAC/ByN,MAAMzN,GAAO,CACd,GAAIq2B,IAAY,EAAIr2B,EAAO,EAAGkD,EAAkB,GAAID,EAAiBjD,EAAO,EAAGkD,EAAkB,GACjG4wB,GAAoBlvB,EAAKH,OAAM,sBAAuBI,UAAS,QAASjD,MAAMy0B,IAC9EvC,EAAkB/uB,QAAQC,OAAM,QAChC8uB,EAAkBpuB,OAAOC,SACzBmuB,EAAkB7uB,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAC3CR,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAClCR,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAClCR,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAG3CsuB,EAAwBnvB,EAAKH,OAAM,sBAAuBI,UAAS,QAASjD,MAAM00B,IAClFvC,EAAsB/uB,OAAM,QAASpD,MAAM00B,IAC3CvC,EAAsBhvB,QAAQC,OAAM,QACpC+uB,EAAsBruB,OAAOC,SAC7BouB,EAAsB9uB,KAAI,IAAM/B,GAEvB+B,KAAI,IAAMhC,EAAiB,GAAKjD,EAAO,GACvCwP,KAAK,SAAS/J,GAAK,MAAOA,KAGvC+vB,EAAa5wB,EAAKH,OAAM,eAAgBI,UAAS,QAASjD,KAAKA,GAC/D4zB,EAAWzwB,QAAQC,OAAM,QACzBwwB,EAAW9vB,OAAOC,SAClB6vB,EAAWvwB,KAAI,IAAM6rB,GAGrBiE,EAAanwB,EAAKH,OAAM,eAAgBI,UAAS,QAASjD,KAAKA,GAC/DmzB,EAAWhwB,QAAQC,OAAM,QACzB+vB,EAAWrvB,OAAOC,SAClBovB,EAAW9vB,KAAI,IAAM6rB,GAChB/oB,MAAK,eAAiB,SAAUtC,EAAGnF,GACY,MAA5CmN,OAAMhI,EAAEmlB,eAAgBnlB,EAAEmlB,YAAc,GAAWnlB,EAAEmlB,cACxD3lB,KAAI,SAAW,SAAUQ,EAAGnF,GAAK,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KACjEy0B,EAAW91B,GAAE,YAAc,SAAUwG,EAAGnF,GACpCxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAAMD,MAAK,eAAiBtC,EAAEmlB,YAAc,EAAI,MAAM7iB,MAAK,iBAAmB,GAC/G/J,EAASsb,kBACLK,MAAOlU,EAAE7F,KACTgK,MAAOnE,EAAEmE,OAASA,EAAMnE,EAAGnF,GAC3B6F,OAAQV,EAAEU,OACVquB,WAAYd,MAIpBqB,EAAW91B,GAAE,WAAa,SAAUwG,EAAGnF,GACnCxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAAOD,MAAK,eAAiBtC,EAAEmlB,YAAc,MAAM7iB,MAAK,iBAAmB,IAC5G/J,EAASsF,iBACLqW,MAAOlU,EAAE7F,KACT8G,MAAOpG,MAGfy0B,EAAW91B,GAAE,YAAc,SAAUwG,EAAGnF,GACpCtC,EAASmG,qBAEb4wB,EAAW91B,GAAE,QAAU,SAAUwG,GAC7BzH,EAASqG,cACLwD,GAAIpC,EAAEoC,OAId2sB,EAAa/e,EAAE5Q,UAAS,cAAejD,KAAK8xB,EAC5C,IAAI6C,GAAkB/B,EAAWzvB,QAAQC,OAAM,KAAMC,KAAI,QAAU,mCAEnEuvB,GAAWvvB,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAeL,EAAEK,EAAEqE,KAAO,QAC5EysB,EAAgBvxB,OAAM,KAAMC,KAAI,QAAU,WAG1CsxB,EAAgBvxB,OAAM,QACjBC,KAAI,QAAU,YACd8C,MAAK,SAAW,QAChB9C,KAAI,KAAO,QACXA,KAAI,cAAgB,UACpBhG,GAAE,YAAc,SAASwG,EAAGnF,GACzBtC,EAASsb,kBACLK,MAAOlU,EAAEjI,SAAWiI,EAAEqE,IACtBF,MAAOnE,EAAEmE,UAGhB3K,GAAE,WAAa,SAASwG,EAAGnF,GACxBtC,EAASsF,iBACLqW,MAAOlU,EAAEjI,YAGhByB,GAAE,YAAc,SAAUwG,EAAGnF,GAC1BtC,EAASmG,qBAEZzF,KAAK03B,GAEVG,EAAgBvxB,OAAM,KAAMC,KAAI,QAAU,sBAC1CuvB,EAAW9uB,OAAOC,SAClB6uB,EAAW/v
B,OAAM,aAAc+K,KAAK,SAAU/J,GAAK,MAAOA,GAAEqE,MAG5DkqB,EAAaW,EAEb,IAAIC,GAAUC,EAAe9qB,OAAO,SAAUN,GAAK,OAAQoD,EAAEpD,GAAGib,MAAMC,UAC9DmQ,EAAUF,EAAQ7iB,IAAI,SAAUtI,GAAK,MAAOoD,GAAEpD,GAAGib,MAAM1H,WAC3DwZ,GAAev2B,EAAOxB,MAAM,EAGhCwB,MACA80B,EAAWhtB,MAAK,UAAY,SAAUtC,GAClC,GAAIuvB,GAAWJ,EAAQhkB,MAAM,SAAUnH,EAAGnJ,GACtC,OAAKmN,MAAMhI,EAAEU,OAAOsD,KAAOgE,MAAMuH,WAAWvP,EAAEU,OAAOsD,OAASqrB,EAAQx0B,GAAG,IAAMuM,EAAEpD,GAAGib,MAAM7X,IAAI5I,SAAS,IAC5F,EAEH6wB,EAAQx0B,GAAG,IAAMmF,EAAEU,OAAOsD,IAAMhE,EAAEU,OAAOsD,IAAMqrB,EAAQx0B,GAAG,KAAQmN,MAAMuH,WAAWvP,EAAEU,OAAOsD,MAIxG,OAFIurB,IACA/0B,EAAOc,KAAK0E,GACRuvB,EAAoB,KAAT,UAInBd,EAAQx0B,OAAS,IAAMpC,EAAGG,MAAM2X,YAAYnV,EAAQu2B,MACrDx4B,EAASy4B,cAAcx2B,KAmKvBG,EAhaX,GAkBM20B,GACAS,EACAhB,EApBF3xB,GAAUE,IAAK,GAAIqR,MAAO,EAAGD,OAAQ,GAAIrR,KAAM,GAC7C4B,EAAQ,KACRC,EAAS,KACT1B,EAAiB,KACjBC,EAAkB,KAClBkC,EAAItH,GAAG8H,MAAMsI,UACbrB,KACAypB,EAAuB,mBACvBP,KACArC,KACAmB,KACAF,GAAe,EACf/qB,EAAQtM,EAAGG,MAAMuQ,eACjBkmB,KACAj0B,KACAq1B,KACA1B,KACAuC,EAAc,EAId7wB,EAAOxH,GAAG0V,IAAIlO,OACdkQ,EAAO1X,GAAG0V,IAAIgC,OACdxX,EAAWF,GAAGE,SAAQ,aAAe,QAAS,WAAY,kBAAmB,cAAe,eAAgB,mBAAoB,kBAAmB,mBAAoB,YAAa,iBAOtL+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EAkcvC,OAzDAoC,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAgB6H,IAAK,WAAW,MAAO7H,IAAmB8H,IAAK,SAASvG,GAAGvB,EAAOuB,IAClFtB,QAAgB4H,IAAK,WAAW,MAAO5H,IAAmB6H,IAAK,SAASvG,GAAGtB,EAAQsB,IACnF8vB,eAAiBxpB,IAAK,WAAc,MAAOwpB,IAAkBvpB,IAAK,SAAUvG,GAAK8vB,EAAgB9vB,IACjG0uB,cAAgBpoB,IAAK,WAAc,MAAOooB,IAAiBnoB,IAAK,SAAUvG,GAAK0uB,EAAe1uB,IAC9FiuB,SAAW3nB,IAAK,WAAc,MAAO2nB,IAAY1nB,IAAK,SAAUvG,GAAKiuB,EAAUjuB,IAC/EhG,QAAUsM,IAAK,WAAc,MAAOtM,IAAWuM,IAAK,SAAUvG,GAAKhG,EAASgG,IAC5EkwB,aAAgB5pB,IAAK,WAAW,MAAO4pB,IAAmB3pB,IAAK,SAASvG,GAAGkwB,EAAclwB,IACzFqwB,sBAAwB/pB,IAAK,WAAW,MAAO+pB,IAAwB9pB,IAAK,SAASvG,GAAGqwB,EAAqBrwB,IAG7GuuB,YAAajoB,IAAK,WAAc,MAAOwpB,GAAchkB,IAAI,SAAUtM,GAAG,MAAOA,GAAEqE,OAAU0C,IAAK,SAAUvG,GAEpG3I,EAAGqC,WAAU,aAAe,6BACC,IAAzBo2B,EAAcr2B,OACduG,EAAEsB,QAAQ,SAAU2hB,GAAK6M,EAAch1B,MAAO+I,IAAKof,MAEnDjjB,EAAEsB,QAAQ,SAAU2hB,EAAG5oB,GAAKy1B,EAAcz1B,GAAGwJ,IAAKof,MAG1D2L,gBAAiBtoB,IAAK,WAAc,MAAOwpB,GAAchkB,IAAI,SAAUtM,GAAG,MAAOA,GAAEqE,OAAU0C,IAAK,SAAUvG,GAExG3I,EAAGqC,WAAU,iBAAmB,6BAChCk1B,KAC6B,IAAzBkB,EAAcr2B,OACduG,EAAEsB,QAAQ,SAAU2hB,GAAK6M,EAAch1B,MAAO+I,IAAKof,MAEnDjjB,EAAEsB,QAAQ,SAAU2hB,EAAG5oB,GAAKy1B,EAAcz1B,GAAGwJ,IAAMof,MAI3DwN,kBAAmBnqB,IAAK,WAAc,MAAOwpB,GAAchkB,IAAI,SAAUtM,GAAK,MAAOA,GAAEwE,UAAcuC,IAAK,SAAUvG,GAEhH3I,EAAGqC,WAAU,mBAAqB,6BACL,IAAzBo2B,EAAcr2B,OACduG,EAAEsB,QAAQ,SAAUyR,GAAK+c,EAAch1B,MAAOkJ,OAAQ+O,MAEtD/S,EAAEsB,QAAQ,SAAUyR,EAAG1Y,GAAKy1B,EAAcz1B,GAAG2J,OAAS+O,MAK9DnW,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAuB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC5DF,EAAOuR,MAAuBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC5DvR,EAAOsR,OAAuBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC5DtR,EAAOC,KAAuB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAEhE8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAGlC3I,EAAGG,MAAMqP,YAAY1M,GACdA,GCzeX9C,EAAGI,OAAOi5B,yBAA2B,WAC7B,YAsEA,SAASv2B,GAAMsB,GAgJX,MA/IAqO,GAAYW,QACZX,EAAYrS,OAAO+1B,GAEnB/xB,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IAEIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAmBnE,IAjBCzC,EAAMqR,OAAS,WAAajN,EAAU9F,KAAK0B,IAC3CA,EAAMoE,UAAYnG,KAElBgR,EAAMmC,OAAO+O,EAAYwV,GAAgB31B,EAAMqR,QAC1CH,OAAOkP,EAAYuV,IACnBtkB,SAGLpC,EAAM8L,SAAW4a,EAAchkB,IAAI,SAAUtM,GAAK,QAASA,EAAE0V,WA
G7D4a,EAAgBA,EAAchkB,IAAI,SAAUtM,GAA+B,MAA3BA,GAAE0V,WAAa1V,EAAE0V,SAAiB1V,IAClFswB,EAAcxuB,QAAQ,SAAU9B,EAAGnF,GAC/BmF,EAAEmxB,iBAAmBnpB,MAAMhI,EAAEmxB,kBAAoBt2B,EAAImF,EAAEmxB,iBACvDnxB,EAAEiwB,gBAAkBjoB,MAAMhI,EAAEiwB,iBAAmBp1B,EAAImF,EAAEiwB,mBAGrDjV,EAAc,CACd,GAAI3W,EACJ2W,KACA,KAAI3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACrBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,IAAIlI,IAASA,EAAKlC,OAEd,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAMtC,IAAIf,GAAOJ,EAAUK,UAAS,yCAA0CjD,MAAMA,IAC1EkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,4CAA4CD,OAAM,KAElGyQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,8BACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAEjCwQ,EAAEhR,OAAM,QACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAY/B,EAAkB,EAAKA,EAAkB,GAGzDie,GAGDC,EAAO1c,MAAMzB,GACR2G,MAAM,SAAUnE,GAAK,MAAO,qBAEjCgQ,EAAEhR,OAAM,kBACHyW,MAAM6a,EAAc7iB,KAAK,SAAU9F,EAAGuO,GAAK,MAAOvO,GAAEwpB,iBAAmBjb,EAAEib,oBACzEl4B,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAElE+B,EAAKH,OAAM,kBACPQ,KAAI,YAAc,kBAAqBpC,EAAOE,IAAO,MAdzD0S,EAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,SAgB9Cf,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAGvE0wB,EACK/uB,MAAMzB,GACN0B,OAAOzB,GACP6yB,cAAcA,GACdpB,aAAaA,EAExB,IAAIkC,GAA0BphB,EAAEhR,OAAM,gCAC7ByW,MAAMtZ,EAEfi1B,GAAwBprB,aAAa/M,KAAK+0B,GAM1CA,EAAoBz1B,SAASiB,GAAE,WAAa,SAAUgB,EAAQi1B,GACtDA,GACAP,GAAe,EACf32B,EAASm3B,SAASl1B,IAGlB00B,GAAe,IAIvBvT,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAI,GAAIxY,KAAOwY,GACXjT,EAAMvF,GAAOwY,EAASxY,EAE1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAIVgiB,EAAoBz1B,SAASiB,GAAE,kBAAoB,SAAUC,GACzD62B,EAAc7iB,KAAK,SAAU9F,EAAGuO,GAAK,MAAOvO,GAAEsoB,gBAAkB/Z,EAAE+Z,iBAClE,IAAIoB,IAAW,CACff,GAAcxuB,QAAQ,SAAU9B,EAAGnF,GAC/BmF,EAAEiwB,gBAAkBp1B,EAChBmF,EAAEiwB,kBAAoBjwB,EAAEmxB,mBACxBE,GAAW,KAEnB94B,EAAS23B,gBAAgBI,EAAee,KAItC94B,EAASiB,GAAE,cAAgB,SAAUC,GAEP,mBAAfA,GAAEic,WACT4a,EAAcxuB,QAAQ,SAAUmC,EAAQpJ,GACpCoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAEjC+O,EAAM8L,SAAWjc,EAAEic,UAEvB/a,EAAMqR,aAId1B,EAAYS,UAAS,sCACdpQ,EAjNX,GAAIqzB,GAAsBn2B,EAAGI,OAAO+1B,sBAChCrS,EAAS9jB,EAAGI,OAAO0jB,SACnB5jB,EAAUF,EAAGI,OAAOF,UAGpBqF,GAFmBvF,EAAGI,OAAOF,WAElBuF,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,IAChDue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACTwc,GAAa,EACbvX,EAAQtM,EAAGG,MAAMuQ,eACjBqB,EAAQ/R,EAAGG,MAAM4R,QACjB0mB,KACApB,GAAe,EACflU,EAAe,KACfpM,EAAS,KACT0iB,EAAW,YACX/4B,EAAWF,GAAGE,SAAQ,kBAAoB,WAAY,cAAe,cAAe,aAUlF+R,EAAczS,EAAGG,MAAMsS,YAAY/R,GAEnCwiB,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,cAKjDoF,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACQlO,SAAjBkO,EAAMpP,QACL2B,EAAK2F,QAAQ,SAASmC,EAAQpJ,GAC1BoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MA2PhD,OArPA9C,GAAQ6K,iBAAiB,SAASzG,GAC9B,GAAIo1B,GAAM,iFAAmFp1B,EAAKgI,MAAQ,4BAA8BhI,EAAKkI,IAAM,6BAUnJ,OAT0B,KAAvBlI,EAAK8H,OAAOhK,SAEXs3B,GAAY,2CACZp1B,EAAK8H,OAAOnC,QAAQ,SAAS9B,GACzBuxB,EAAMA,EAAM,mEAAqEvxB,EAAEmE,MAAQ,gCAAkCnE,EAAEqE,IAAM,0BAA4BrE,EAAE6D,MAAQ,eAE/K0tB,GAAY,YAEhBA,GAAY,aA+JhBvD,EAAoBz1B,SAASiB,GAAE,2BAA6B,SAAUoc,GAClE,GAAI4b,IACAntB,IAAKuR,EAAI1B,MACT/P,MAAOyR,EAAIzR,MACXF,UAED2R,GAAIlV,SACHkG,OAAOyZ,KAAKzK,EAAIlV,QAAQoB,QAAQ,SAAU9B,GACtC,GAAIyxB,GAAM7b,EAAImZ,WAAWzqB,OAAO,SAAUotB,GAAK,MAAOA,GAAGrtB,MAAQrE,IAAK,EACtE,IAAGyxB,EAAG,CACF,GAAI1wB,EAEAA,GADAiH,MAAM4N,EAAIlV,OAAOV,KAAOgI,MAAMuH,WAAWqG,EAAIlV,OAAOV,KAChDsxB,EAEAG,EAAIjtB,OAAOoR,EAAIlV,OAAOV,IAE9BwxB,EAAGvtB,OAAO3I,MAAO8hB,IAAKqU,EAAIxB,gBAAiB5rB,IAAKrE,EAAG6D,MAAO9C,EAAGoD,MAAOstB,EAAIttB,WAGhFqtB,EAAGvtB,OAAOwJ,KAAK,SAAS9F,EAAEuO,GAAI,MAAOvO,GAAEyV,IAAMlH,EAAEkH,OAEnDrlB,EAAQoE,KA
AKq1B,GAAIzzB,QAAO,KAG5BiwB,EAAoBz1B,SAASiB,GAAE,0BAA4B,SAASoc,GAChE7d,EAAQgG,QAAO,KAGnBiwB,EAAoBz1B,SAASiB,GAAE,2BAA6B,WACxDzB,MAOJ4C,EAAMpC,SAAWA,EACjBoC,EAAMqzB,oBAAsBA,EAC5BrzB,EAAMghB,OAASA,EACfhhB,EAAM5C,QAAUA,EAChB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAS6H,IAAK,WAAc,MAAO7H,IAAU8H,IAAK,SAAUvG,GAAKvB,EAAQuB,IACzEtB,QAAU4H,IAAK,WAAc,MAAO5H,IAAW6H,IAAK,SAAUvG,GAAKtB,EAASsB,IAC5Ekb,YAAc5U,IAAK,WAAc,MAAO4U,IAAe3U,IAAK,SAAUvG,GAAKkb,EAAalb,IACxFwa,cAAgBlU,IAAK,WAAc,MAAOkU,IAAiBjU,IAAK,SAAUvG,GAAKwa,EAAexa,IAC9F8vB,eAAiBxpB,IAAK,WAAc,MAAOwpB,IAAkBvpB,IAAK,SAAUvG,GAAK8vB,EAAgB9vB,IACjG0uB,cAAgBpoB,IAAK,WAAc,MAAOooB,IAAiBnoB,IAAK,SAAUvG,GAAK0uB,EAAe1uB,IAC9FoO,QAAU9H,IAAK,WAAc,MAAO8H,IAAW7H,IAAK,SAAUvG,GAAKoO,EAASpO,IAC5E8wB,UAAYxqB,IAAK,WAAc,MAAOwqB,IAAavqB,IAAK,SAAUvG,GAAK8wB,EAAW9wB,IAGlFpD,QACI0J,IAAK,WAAc,MAAO1J,IAC1B2J,IAAK,SAAUvG,GACG9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAoBjT,SAAZ8E,EAAEmO,MAAsBnO,EAAEmO,MAAQvR,EAAOuR,MACxDvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAkB3B,SAAX8E,EAAEnD,KAAqBmD,EAAEnD,KAAOD,EAAOC,OAG7D8G,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAC9C2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,GACb6pB,EAAoB7pB,MAAMA,OAItCtM,EAAGG,MAAMkW,eAAevT,EAAOqzB,GAC/Bn2B,EAAGG,MAAMqP,YAAY1M,GAEdA,GC1Sf9C,EAAGI,OAAO05B,IAAM,WACZ,YA2CA,SAASh3B,GAAMsB,GA+TX,MA9TAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GAgTpB,QAASy1B,GAASjqB,EAAGyV,GACjBzV,EAAEkqB,SAAW7pB,MAAML,EAAEkqB,UAAY,EAAIlqB,EAAEkqB,SACvClqB,EAAEmqB,WAAa9pB,MAAML,EAAEmqB,YAAc,EAAInqB,EAAEmqB,WACtCC,IAAOpqB,EAAEqqB,YAAc,EAC5B,IAAIn3B,GAAIxC,GAAGynB,YAAYlnB,KAAKq5B,SAAUtqB,EAEtC,OADA/O,MAAKq5B,SAAWp3B,EAAE,GACX,SAAUgT,GACb,MAAOqkB,GAAK9U,GAAKviB,EAAEgT,KAtT3B,GAAIrQ,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC5ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,OAC/CyS,EAAS5f,KAAKF,IAAI7D,EAAgBC,GAAmB,EACrD00B,KACAC,IAIN,IADArzB,EAAY1G,GAAG2G,OAAOpG,MACI,IAAtBy5B,EAAWp4B,OAGX,IAAK,GAFDq4B,GAAQnR,EAASA,EAAS,EAC1BoR,EAAQC,EAAarR,EAChBtmB,EAAI,EAAGA,EAAIsB,EAAK,GAAGlC,OAAQY,IAChCs3B,EAAgB72B,KAAKg3B,GACrBF,EAAgB92B,KAAKi3B,OAGtBE,IACCN,EAAkBE,EAAW/lB,IAAI,SAAUtM,GAAK,OAAQA,EAAEsyB,MAAQtyB,EAAEsyB,MAAQ,GAAKnR,IACjFiR,EAAkBC,EAAW/lB,IAAI,SAAUtM,GAAK,OAAQA,EAAEuyB,MAAQvyB,EAAEuyB,MAAQ,GAAKpR,IACjFqR,EAAan6B,GAAGgJ,IAAIgxB,EAAW/lB,IAAI,SAAUtM,GAAK,MAAQA,GAAEuyB,MAAQvyB,EAAEuyB,MAAQ,OAE9EJ,EAAkBE,EAAW/lB,IAAI,SAAUtM,GAAK,MAAOA,GAAEsyB,MAAQnR,IACjEiR,EAAkBC,EAAW/lB,IAAI,SAAUtM,GAAK,MAAOA,GAAEuyB,MAAQpR,IACjEqR,EAAan6B,GAAGgJ,IAAIgxB,EAAW/lB,IAAI,SAAUtM,GAAK,MAAOA,GAAEuyB,SAGnE16B,GAAGG,MAAMsW,QAAQvP,EAGjB,IAAII,GAAOJ,EAAUK,UAAS,mBAAoBjD,KAAKA,GACnDkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAO,gCAAoC4C,GACpFiT,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,KACf0zB,EAAQrd,EAAO9V,OAAM,KAAMC,KAAI,QAAU,SAC7C6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KACvE0S,EAAEhR,OAAM,WAAYQ,KAAI,YAAc,aAAehC,EAAiB,EAAI,IAAMC,EAAkB,EAAI,KACtGuS,EAAEhR,OAAM,iBAAkBQ,KAAI,YAAc,aAAehC,EAAiB,EAAI,IAAMC,EAAkB,EAAI,KAG5GsB,EAAUvF,GAAE,QAAU,SAASwG,EAAEnF,GAC7BtC,EAAS8gB,YACLld,KAAM6D,EACNiB,MAAOpG,EACPiK,IAAKzM,GAAGuE,MACRwF,GAAIA,MAIZ8vB,KACAS,IACA,KAAK,GAAI93B,GAAI,EAAGA,EAAIsB,EAAK,GAAGlC,OAAQY,IAAK,CAErC,GAAI+3B,GAAMv6B,GAAG0V,IAAI6kB,MAAMC,YAAYV,EAAgBt3B,IAC/Ci4B,EAAUz6B,GAAG0V,IAAI6kB,MAAMC,YAAYV,EAAgBt3B,GAAK,EAExDi3B,MAAe,IACfc,EAAId,WAAWA,GACfgB,EAAQhB,WAAWA,IAEnBD,KAAa,IACbe,EAAIf,SAASA,GACbiB,EAAQjB,SAASA,IAEjBE,IACAa,EAAIZ,YAAYI,EAAgBv3B,IAChCi4B,EAAQd,YAAYI,EAAgBv3B,KAGpC+3B,EAAIG,cAAgBA,IACpBH,EAAIG,aAAaA,GACjBD,EAAQC,aA
AaA,IAGzBb,EAAK52B,KAAKs3B,GACVD,EAASr3B,KAAKw3B,GAIlB,GAAInB,GAAMt5B,GAAGmoB,OAAOmR,MACflkB,KAAK,MACL5J,MAAM,SAAS7D,GAAK,MAAOA,GAAE0V,SAAW,EAAIgE,EAAK1Z,IAGlD2xB,GAAIqB,UAAYA,GAChBrB,EAAIqB,SAASA,GAIbjB,GAASxZ,IACTma,EAAMnzB,OAAM,QAASC,KAAI,QAAU,gBAEnCL,EAAKH,OAAM,iBACNsD,MAAK,cAAgB,UACrByH,KAAK,SAAU/J,GACZ,MAAOuY,KAEVjW,MAAK,YAAef,KAAKF,IAAI7D,EAAgBC,GAAoB+0B,EAAa,GAAKja,EAAMte,OAAS,GAAK,MACvGuF,KAAI,KAAO,UACXA,KAAI,YAAc,SAASQ,EAAGnF,GAC3B,MAAO,gBAAiBo4B,EAAc,MAIlD,IAAIC,GAAS/zB,EAAKH,OAAM,WAAYI,UAAS,aAAcjD,KAAKw1B,GAC5DwB,EAAYh0B,EAAKH,OAAM,iBAAkBI,UAAS,aAAcjD,KAAKw1B,EAEzEuB,GAAOjzB,OAAOC,SACdizB,EAAUlzB,OAAOC,QAEjB,IAAIkzB,GAAKF,EAAO5zB,QAAQC,OAAM,IAC9B6zB,GAAG5zB,KAAI,QAAU,YACjB4zB,EAAG55B,GAAE,YAAc,SAASwG,EAAGnF,GAC3BxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAC7BkwB,GACAp6B,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASgH,aAC1B1F,SAAS,IACTd,KAAI,IAAMmzB,EAAS93B,IAE5BtC,EAASsb,kBACL1X,KAAM6D,EAAE7D,KACR8E,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5BiC,SAAUvE,EAAE6xB,SAAW7xB,EAAE8xB,aAAe,EAAIvwB,KAAK+P,QAGzD8hB,EAAG55B,GAAE,WAAa,SAASwG,EAAGnF,GAC1BxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAC7BkwB,GACAp6B,GAAG2G,OAAOpG,MAAMoG,OAAM,QAASgH,aAC1B1F,SAAS,IACTd,KAAI,IAAM0yB,EAAKr3B,IAExBtC,EAASsF,iBAAiB1B,KAAM6D,EAAE7D,KAAM8E,MAAOpG,MAEnDu4B,EAAG55B,GAAE,YAAc,SAASwG,EAAGnF,GAC3BtC,EAASmG,kBAAkBvC,KAAM6D,EAAE7D,KAAM8E,MAAOpG,MAEpDu4B,EAAG55B,GAAE,QAAU,SAASwG,EAAGnF,GACvB,GAAIojB,GAAUrlB,IACdL,GAASqG,cACLzC,KAAM6D,EAAE7D,KACR8E,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5B1F,MAAOvE,GAAGuE,MACVqhB,QAASA,MAGjBmV,EAAG55B,GAAE,WAAa,SAASwG,EAAGnF,GAC1BtC,EAAS4lB,iBACLhiB,KAAM6D,EAAE7D,KACR8E,MAAOpG,EACPsJ,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,YAIpC4wB,EAAO1zB,KAAI,OAAS,SAASQ,EAAEnF,GAAK,MAAOsJ,GAAMnE,EAAE7D,KAAMtB,KACzDq4B,EAAO1zB,KAAI,SAAW,SAASQ,EAAEnF,GAAK,MAAOsJ,GAAMnE,EAAE7D,KAAMtB,IAE/Cu4B,GAAG7zB,OAAM,QAASrD,KAAK,SAAS8D,GACxCpH,KAAKq5B,SAAWjyB,GASpB,IANAkzB,EAAOl0B,OAAM,QACRgH,aACA1F,SAASA,GACTd,KAAI,IAAM,SAAUQ,EAAGnF,GAAK,MAAOq3B,GAAKr3B,GAAGmF,KAC3CqzB,UAAS,IAAMzB,GAEhB0B,EAAY,CAGZ,IAAK,GADDC,MACK14B,EAAI,EAAGA,EAAIsB,EAAK,GAAGlC,OAAQY,IAChC04B,EAAUj4B,KAAK42B,EAAKr3B,IAEhB24B,EACIzB,IACAwB,EAAU14B,GAAKxC,GAAG0V,IAAI6kB,MAAMC,YAAYX,EAAKr3B,GAAGg4B,eAC5Cf,KAAe,GAAOyB,EAAU14B,GAAGi3B,WAAWA,GAC9CD,KAAa,GAAO0B,EAAU14B,GAAGg3B,SAASA,IAE1CE,GACJwB,EAAU14B,GAAGm3B,YAAY,EAIrCmB,GAAU7zB,QAAQC,OAAM,KAAMgD,QAAO,YAAY,GAAMrG,KAAK,SAAS8D,EAAEnF,GACnE,GAAI44B,GAAQp7B,GAAG2G,OAAOpG,KAEtB66B,GAAMj0B,KAAI,YAAc,SAAUQ,EAAGnF,GACjC,GAAI64B,EAAoB,CACpB1zB,EAAE6yB,YAAcV,EAAgBt3B,GAAK,GACrCmF,EAAEgyB,YAAcG,EAAgBt3B,GAAK,EACrC,IAAI84B,IAAe3zB,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,GAAK,IAAMtwB,KAAK+P,GAMhE,QALKtR,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,EAAItwB,KAAK+P,GACvCqiB,GAAe,GAEfA,GAAe,GAEZ,aAAeJ,EAAU14B,GAAG+4B,SAAS5zB,GAAK,YAAc2zB,EAAc,IAI7E,MAFA3zB,GAAE6yB,YAAc1R,EAAS,GACzBnhB,EAAEgyB,YAAc7Q,EAAS,GAClB,aAAeoS,EAAU14B,GAAG+4B,SAAS5zB,GAAK,MAIzDyzB,EAAMl0B,OAAM,QACP+C,MAAK,SAAW,QAChBA,MAAK,OAAS,QACd9C,KAAI,KAAO,GACXA,KAAI,KAAO,GAEhBi0B,EAAMl0B,OAAM,QACP+C,MAAK,cAAgBoxB,GAAuB1zB,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,EAAItwB,KAAK+P,GAAK,QAAU,MAAS,UAC1GhP,MAAK,OAAS,SAGvB,IAAIuxB,MACAC,EAAY,GACZC,GAAW,IACXC,GAAgB,SAASC,GACzB,MAAO1yB,MAAKwB,MAAMkxB,EAAY,GAAGF,IAAYA,GAAW,IAAMxyB,KAAKwB,MAAMkxB,EAAY,GAAGH,GAAaA,GAErGI,GAAqB,SAASl0B,GAC9B,OAAQA,EAAE6xB,SAAW7xB,EAAE8xB,aAAe,EAAIvwB,KAAK+P,IAGnD6hB,GAAU9oB,gBAAgBC,EAAa,cAAc9K,KAAI,YAAc,SAAUQ,EAAGnF,GAChF,GAAI64B,EAAoB,CACpB1zB,EAAE6yB,YAAcV,EAAgBt3B,GAAK,GACrCmF,EAAEgyB,YAAcG,EAAgBt3B,GAAK,EACrC,IAAI84B,IAAe3zB,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,GAAK,IAAMtwB,KAAK+P,GAMhE,QALKtR,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,EAAItwB,KAAK+P,G
ACvCqiB,GAAe,GAEfA,GAAe,GAEZ,aAAeJ,EAAU14B,GAAG+4B,SAAS5zB,GAAK,YAAc2zB,EAAc,IAE7E3zB,EAAE6yB,YAAc1R,EAAS,GACzBnhB,EAAEgyB,YAAc7Q,EAAS,EAOzB,IAAIgT,GAASZ,EAAU14B,GAAG+4B,SAAS5zB,GAC/BuE,EAAU2vB,GAAmBl0B,EACjC,IAAIA,EAAE6D,OAASU,GAAW6vB,EAAgB,CACtC,GAAIC,GAAUL,GAAcG,EACxBN,GAAkBQ,KAClBF,EAAO,IAAML,GAEjBD,EAAkBG,GAAcG,KAAW,EAE/C,MAAO,aAAeA,EAAS,MAIvChB,EAAUn0B,OAAM,kBACXsD,MAAK,cAAgB,SAAStC,EAAEnF,GAE7B,MAAO64B,IAAuB1zB,EAAE8xB,WAAa9xB,EAAE6xB,UAAY,EAAItwB,KAAK+P,GAAK,QAAU,MAAS,WAE/FvH,KAAK,SAAS/J,EAAGnF,GACd,GAAI0J,GAAU2vB,GAAmBl0B,GAC7BkU,EAAQ,EACZ,KAAKlU,EAAE6D,OAAmBuwB,EAAV7vB,EAA0B,MAAO,EAEjD,IAAwB,kBAAd+vB,GACNpgB,EAAQogB,EAAUt0B,EAAGnF,GACjBwJ,IAAOyN,EAAK9R,EAAE7D,MACd0H,MAAS6V,EAAK1Z,EAAE7D,MAChBoI,QAAW6Z,EAAY7Z,SAG3B,QAAQ+vB,GACJ,IAAK,MACDpgB,EAAQpC,EAAK9R,EAAE7D,KACf,MACJ,KAAK,QACD+X,EAAQkK,EAAY1E,EAAK1Z,EAAE7D,MAC3B,MACJ,KAAK,UACD+X,EAAQ7b,GAAGmM,OAAM,KAAMD,GAInC,MAAO2P,QAwBvB5J,EAAYS,UAAS,iBACdpQ,EApWX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACT4S,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BhF,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZoF,EAAQtM,EAAGG,MAAMuQ,eACjB6V,EAAc/lB,GAAGmM,OAAM,QACvB8uB,GAAa,EACbE,GAAgB,EAChBc,EAAY,MACZF,EAAiB,IACjBrC,GAAQ,EACRxZ,GAAQ,EACRka,GAAc,EACdQ,EAAc,EACdS,GAAqB,EACrB5B,GAAa,EACbkB,GAAW,EACXnB,GAAW,EACXkB,EAAe,EACfP,EAAa,GACblyB,EAAW,IACX+xB,KACA95B,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,kBAAmB,mBAAoB,kBAAmB,mBAAoB,aAGrI25B,KACAS,KAMAroB,EAAczS,EAAGG,MAAMsS,YAAY/R,EAwYvC,OAhEAoC,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpBwrB,YAAcvrB,IAAK,WAAc,MAAOurB,IAAetrB,IAAK,SAAUvG,GAAK6xB,EAAa7xB,IACxFvB,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtE8yB,YAAaxsB,IAAK,WAAW,MAAOwsB,IAAcvsB,IAAK,SAASvG,GAAG8yB,EAAW9yB,IAC9E+X,OAAazR,IAAK,WAAW,MAAOyR,IAASxR,IAAK,SAASvG,GAAG+X,EAAM/X,IACpEyyB,aAAiBnsB,IAAK,WAAW,MAAOmsB,IAAelsB,IAAK,SAASvG,GAAGyyB,EAAYzyB,IACpF4zB,gBAAiBttB,IAAK,WAAW,MAAOstB,IAAkBrtB,IAAK,SAASvG,GAAG4zB,EAAe5zB,IAC1F4d,aAAiBtX,IAAK,WAAW,MAAOsX,IAAerX,IAAK,SAASvG,GAAG4d,EAAY5d,IACpFb,GAAamH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKtR,IAClE4B,IAAa0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC9DqxB,UAAa/qB,IAAK,WAAW,MAAO+qB,IAAY9qB,IAAK,SAASvG,GAAGqxB,EAASrxB,IAC1EsxB,YAAahrB,IAAK,WAAW,MAAOgrB,IAAc/qB,IAAK,SAASvG,GAAGsxB,EAAWtxB,IAC9EwyB,UAAalsB,IAAK,WAAW,MAAOksB,IAAYjsB,IAAK,SAASvG,GAAGwyB,EAASxyB,IAC1EuyB,cAAejsB,IAAK,WAAW,MAAOisB,IAAgBhsB,IAAK,SAASvG,GAAGuyB,EAAavyB,IACpFgyB,YAAe1rB,IAAK,WAAW,MAAO0rB,IAAczrB,IAAK,SAASvG,GAAGgyB,EAAWhyB,IAChFgzB,eAAgB1sB,IAAK,WAAW,MAAO0sB,IAAiBzsB,IAAK,SAASvG,GAAGgzB,EAAchzB,IACvFkzB,oBAAqB5sB,IAAK,WAAW,MAAO4sB,IAAsB3sB,IAAK,SAASvG,GAAGkzB,EAAmBlzB,IACtGuxB,OAAqBjrB,IAAK,WAAW,MAAOirB,IAAShrB,IAAK,SAASvG,GAAGuxB,EAAMvxB,IAC5EiyB,aAAqB3rB,IAAK,WAAW,MAAO2rB,IAAe1rB,IAAK,SAASvG,GAAGiyB,EAAYjyB,IAGxF+zB,kBAAmBztB,IAAK,WAAW,MAAO0sB,IAAiBzsB,IAAK,SAASvG,GACrEgzB,EAAchzB,EACd3I,EAAGqC,WAAU,mBAAqB,+BAGtCs6B,oBAAqB1tB,IAAK,WAAW,MAAO0sB,IAAiBzsB,IAAK,SAASvG,GACvEgzB,EAAchzB,EACd3I,EAAGqC,WAAU,qBAAuB,+BAGxCu6B,aAAc3tB,IAAK,WAAY,MAAOsX,IAAerX,IAAK,SAASvG,GAC/D4d,EAAY5d,EACZ3I,EAAGqC,WAAU,cAAa,6BAI9BkD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAA4B,mBAAZkD,GAAElD,IAAwBkD,EAAElD,IAASF,EAAOE,IACnEF,EAAOuR,MAA4B,mBAAZnO,GAAEmO,MAAwBnO,EAAEmO,MAASvR,EAAOuR,MACnEvR,EAAOsR,OAA4B,mBAAZlO,GAAEkO,OAAwBlO,EAAEkO,OAAStR,EAAOsR,OACnEtR,EAAOC,KAA4B,mBAAZmD,GAAEnD,KAAwBmD,EAAEnD,KAASD,EAAOC,OAEvEiD,UAAWwG,IAAK,WAAW,MAAOxG,I
AAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,KAEtB8G,GAAIN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAC7CkZ,EAAKrhB,GAAG4V,QAAQzN,KAEpB2D,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAClD2D,EAAMtM,EAAGG,MAAMsQ,SAAS9H,KAE5B8zB,WAAqBxtB,IAAK,WAAW,MAAOwtB,IAAavtB,IAAK,SAASvG,GACnE8zB,EAAW9zB,GAAK,UAIxB3I,EAAGG,MAAMqP,YAAY1M,GACdA,GClbX9C,EAAGI,OAAOy8B,SAAW,WACjB,YA4DA,SAAS/5B,GAAMsB,GAkHX,MAjHAqO,GAAYW,QACZX,EAAYrS,OAAO05B,GAEnB11B,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IACIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAYlE,IAVAzC,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa/M,KAAK0B,IACxDA,EAAMoE,UAAYnG,KAElBgR,EAAMmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QACjCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,IAAKlI,IAASA,EAAKlC,OAEf,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAItC,IAAIf,GAAOJ,EAAUK,UAAS,yBAA0BjD,MAAMA,IAC1DkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,4BAA4BD,OAAM,KAClFyQ,EAAI7Q,EAAKH,OAAM;AjCnG/B,AiCyGY,EjCzGV,CAAC,AiCqGSqW,EAAO9V,KjCrGT,CAAC,CAAC,AiCqGa,CjCrGZ,EAAE,CAAC,CiCqGeC,KAAI,CjCrGZ,CAAC,EAAE,CAAC,GAAG,AiCqGe,CjCrGd,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,EiCsG9B6V,EAAO9V,GjCtG8B,CAAC,GiCsGzB,GjCtG+B,EiCsGzBC,KAAI,QAAU,iBAG5Bkc,GAGD,GAAuB,QAAnB0K,EACAzK,EAAO1c,MAAOzB,GAAiB6G,IAAIstB,EAAIhyB,KAEvCR,EAAKH,OAAM,kBACNyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE+B,EAAKH,OAAM,kBACNQ,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,SACnD,IAAuB,UAAnB8oB,EAA4B,CACnC,GAAI7C,GAAc1rB,EAAGI,OAAO0jB,SAAS1c,OACZskB,GAArB/lB,EAAiB,IACjB+lB,EAAe/lB,EAAiB,GAEpCme,EAAOzc,OAAOzB,GAAiB4G,IAAIstB,EAAIhyB,KACvCgc,EAAO1c,MAAMskB,GACb/lB,GAAkBme,EAAO1c,QAEzBE,EAAKH,OAAM,kBACNyW,MAAMtZ,GACNlD,KAAK0iB,GACLnc,KAAI,YAAc,aAAe,EAAiB,YA5B3DwQ,GAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,QA+B9Cf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAGvEq0B,EAAI1yB,MAAMzB,GAAgB0B,OAAOzB,EACjC,IAAIk3B,GAAU3kB,EAAEhR,OAAM,eAAgByW,OAAOtZ,GAC7C9D,IAAG2N,WAAW2uB,GAAS17B,KAAK04B,GAM5BhW,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAE1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAIVzT,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAEjC+O,EAAM8L,SAAWjc,EAAEic,UAEvB/a,EAAMqR,aAId1B,EAAYS,UAAS,sBACdpQ,EAxKX,GAAIg3B,GAAM95B,EAAGI,OAAO05B,MAChBhW,EAAS9jB,EAAGI,OAAO0jB,SACnB5jB,EAAUF,EAAGI,OAAOF,UAEpBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACT01B,GAAqB,EACrBlZ,GAAa,EACb0K,EAAiB,MACjBjiB,EAAQtM,EAAGG,MAAMuQ,eACjBqB,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACTtO,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,cAAgB,cAAY,YAGxDR,GACKuI,SAAS,GACT+C,eAAc,GACdC,eAAe,SAAStD,EAAGnF,GACxB,MAAO82B,GAAIvT,cAAcpe,EAAGnF,IAOpC,IAAIyP,GAAczS,EAAGG,MAAMsS,YAAY/R,GAEnCwiB,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,cAKjDoF,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACSlO,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAAUmC,EAAQpJ,GAC3BoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MAsMhD,OAnEA82B,GAAIp5B,SAASiB,GAAE,2BAA6B,SAASoc,GACjDA,EAAW,QACPvR,IAAK1J,EAAMgF,IAAIiW,EAAIzZ,MACnB0H,MAAOlJ,EAAMyM,IAAIwO,EAAIzZ,MACrBgI,MAAOyR,EAAIzR,MACXI,QAASqR,EAAIrR,SAEZqwB,UACMhf,GAAIrR,cACJqR,GAAI3R,OAAOM,SAEtBxM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7B4zB,EAAIp5B,SAASiB,GAAE,0BAA4B,SAASoc,GAChD7d,EAAQgG,QAAO,
KAGnB4zB,EAAIp5B,SAASiB,GAAE,2BAA6B,SAASoc,GACjD7d,MAQJ4C,EAAMghB,OAASA,EACfhhB,EAAMpC,SAAWA,EACjBoC,EAAMg3B,IAAMA,EACZh3B,EAAM5C,QAAUA,EAChB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAG1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAqB6H,IAAK,WAAW,MAAO7H,IAAwB8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAC3FtB,QAAqB4H,IAAK,WAAW,MAAO5H,IAAwB6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC5FoO,QAAqB9H,IAAK,WAAW,MAAO8H,IAAwB7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAC5Fo0B,oBAAqB9tB,IAAK,WAAW,MAAO8tB,IAAwB7tB,IAAK,SAASvG,GAAGo0B,EAAmBp0B,IACxGkb,YAAqB5U,IAAK,WAAW,MAAO4U,IAAwB3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAChG4lB,gBAAqBtf,IAAK,WAAW,MAAOsf,IAAwBrf,IAAK,SAASvG,GAAG4lB,EAAe5lB,IACpGwa,cAAqBlU,IAAK,WAAW,MAAOkU,IAAwBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IAGlG2D,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAClD2D,EAAQ3D,EACRmb,EAAOxX,MAAMA,GACbwtB,EAAIxtB,MAAMA,KAEd7D,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBqxB,EAAIrxB,SAASA,KAEjBlD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,SAGnExF,EAAGG,MAAMkW,eAAevT,EAAOg3B,GAC/B95B,EAAGG,MAAMqP,YAAY1M,GACdA,GCzPX9C,EAAGI,OAAO48B,OAAS,WACf,YAqEA,SAASC,KACL5U,EAAMpe,QAAQ,SAASG,GAEnBA,EAAK8yB,eAEL9yB,EAAK+yB,iBAET/rB,EAAMnH,QAAQ,SAASmf,GACnB,GAAI1V,GAAS0V,EAAK1V,OACdxO,EAASkkB,EAAKlkB,MACI,iBAAXwO,KAAqBA,EAAS0V,EAAK1V,OAAS2U,EAAMe,EAAK1V,SAC5C,gBAAXxO,KAAqBA,EAASkkB,EAAKlkB,OAASmjB,EAAMe,EAAKlkB,SAClEwO,EAAOwpB,YAAYz5B,KAAK2lB,GACxBlkB,EAAOi4B,YAAY15B,KAAK2lB,KAKhC,QAASgU,KACL/U,EAAMpe,QAAQ,SAASG,GACnBA,EAAK4B,MAAQtC,KAAKL,IACd7I,GAAG68B,IAAIjzB,EAAK8yB,YAAalxB,GACzBxL,GAAG68B,IAAIjzB,EAAK+yB,YAAanxB,MASrC,QAASsxB,KASL,IAPA,GACIC,GADAC,EAAiBnV,EAEjBvgB,EAAI,EAKD01B,EAAep7B,QAAU0F,EAAIugB,EAAMjmB,QACtCm7B,KACAC,EAAevzB,QAAQ,SAASG,GAC5BA,EAAKtC,EAAIA,EACTsC,EAAKuY,GAAK8a,EACVrzB,EAAK8yB,YAAYjzB,QAAQ,SAASmf,GAC1BmU,EAAUpqB,QAAQiW,EAAKlkB,QAAU,GACjCq4B,EAAU95B,KAAK2lB,EAAKlkB,YAIhCs4B,EAAiBD,IACfz1B,CAKF41B,IACAC,EAAe71B,GAGnB81B,GAAmBluB,EAAK,GAAK+tB,IAAc31B,EAAI,IAWnD,QAAS61B,GAAe71B,GACpBugB,EAAMpe,QAAQ,SAASG,GACdA,EAAK8yB,YAAY96B,SAClBgI,EAAKtC,EAAIA,EAAI,KAKzB,QAAS81B,GAAkBC,GACvBxV,EAAMpe,QAAQ,SAASG,GACnBA,EAAKtC,GAAK+1B,IAKlB,QAASC,GAAkBC,GAqBvB,QAASC,KAEL,GAAIC,GAAKz9B,GAAGgJ,IAAI00B,EAAgB,SAAS7V,GACrC,OAAQ3Y,EAAK,IAAM2Y,EAAMjmB,OAAS,GAAK+7B,GAAe39B,GAAG68B,IAAIhV,EAAOrc,IAGxEkyB,GAAej0B,QAAQ,SAASoe,GAC5BA,EAAMpe,QAAQ,SAASG,EAAMpH,GACzBoH,EAAKmF,EAAIvM,EACToH,EAAKqN,GAAKrN,EAAK4B,MAAQiyB,MAI/B7sB,EAAMnH,QAAQ,SAASmf,GACnBA,EAAK3R,GAAK2R,EAAKpd,MAAQiyB,IAI/B,QAASG,GAAiBlV,GAWtB,QAASmV,GAAejV,GACpB,OAAQA,EAAK1V,OAAOnE,EAAI6Z,EAAKkV,GAAKlV,EAAK3R,GAAK,GAAK2R,EAAKpd,MAX1DkyB,EAAej0B,QAAQ,SAASoe,EAAOkW,GACnClW,EAAMpe,QAAQ,SAASG,GACnB,GAAIA,EAAK+yB,YAAY/6B,OAAQ,CAEzB,GAAImN,GAAI/O,GAAG68B,IAAIjzB,EAAK+yB,YAAakB,GAAkB79B,GAAG68B,IAAIjzB,EAAK+yB,YAAanxB,EAC5E5B,GAAKmF,IAAMA,EAAI+sB,EAAOlyB,IAAS8e,OAU/C,QAASsV,GAAiBtV,GAWtB,QAASuV,GAAerV,GACpB,OAAQA,EAAKlkB,OAAOqK,EAAI6Z,EAAKsV,GAAKtV,EAAK3R,GAAK,GAAK2R,EAAKpd,MAX1DkyB,EAAe/8B,QAAQmW,UAAUrN,QAAQ,SAASoe,GAC9CA,EAAMpe,QAAQ,SAASG,GACnB,GAAIA,EAAK8yB,YAAY96B,OAAQ,CAEzB,GAAImN,GAAI/O,GAAG68B,IAAIjzB,EAAK8yB,YAAauB,GAAkBj+B,GAAG68B,IAAIjzB,EAAK8yB,YAAalxB,EAC5E5B,GAAKmF,IAAMA,EAAI+sB,EAAOlyB,IAAS8e,OAU/C,QAASyV,KACLT,EAAej0B,QAAQ,SAASoe,GAC5B,GAAIje,GACAqN,EAGAzU,EAFA+iB,EAAK,EACLxT,EAAI8V,EAAMjmB,MAKd,KADAimB,EAAMzS,KAAKgpB,GACN57B,EAAI,EAAOuP,EAAJvP,IAASA,EACjBoH,EAAOie,EAAMrlB,GACbyU,EAAKs
O,EAAK3b,EAAKmF,EACXkI,EAAK,IAAGrN,EAAKmF,GAAKkI,GACtBsO,EAAK3b,EAAKmF,EAAInF,EAAKqN,GAAK0mB,CAK5B,IADA1mB,EAAKsO,EAAKoY,EAAczuB,EAAK,GACzB+H,EAAK,EAIL,IAHAsO,EAAK3b,EAAKmF,GAAKkI,EAGVzU,EAAIuP,EAAI,EAAGvP,GAAK,IAAKA,EACtBoH,EAAOie,EAAMrlB,GACbyU,EAAKrN,EAAKmF,EAAInF,EAAKqN,GAAK0mB,EAAcpY,EAClCtO,EAAK,IAAGrN,EAAKmF,GAAKkI,GACtBsO,EAAK3b,EAAKmF,IAM1B,QAASqvB,GAAe9uB,EAAGuO,GACvB,MAAOvO,GAAEP,EAAI8O,EAAE9O,EAvGnB,GAAI2uB,GAAiB19B,GAAGq+B,OACnBryB,IAAI,SAASrE,GAAK,MAAOA,GAAEL,IAC3Bg3B,SAASt+B,GAAGu+B,WACZC,QAAQ3W,GACR5T,IAAI,SAAStM,GAAK,MAAOA,GAAEU,QAGhCm1B,KACAW,IACAM,GACA,KAAK,GAAI/V,GAAQ,EAAG6U,EAAa,IAAKA,EAClCS,EAAiBtV,GAAS,KAC1ByV,IACAM,IACAb,EAAiBlV,GACjByV,IACAM,IA6FR,QAASA,KAiBL,QAASC,GAAqBpvB,EAAGuO,GAC7B,MAAOvO,GAAE4D,OAAOnE,EAAI8O,EAAE3K,OAAOnE,EAGjC,QAAS4vB,GAAqBrvB,EAAGuO,GAC7B,MAAOvO,GAAE5K,OAAOqK,EAAI8O,EAAEnZ,OAAOqK,EArBjC8Y,EAAMpe,QAAQ,SAASG,GACnBA,EAAK8yB,YAAYtnB,KAAKupB,GACtB/0B,EAAK+yB,YAAYvnB,KAAKspB,KAE1B7W,EAAMpe,QAAQ,SAASG,GACnB,GAAIk0B,GAAK,EAAGI,EAAK,CACjBt0B,GAAK8yB,YAAYjzB,QAAQ,SAASmf,GAC9BA,EAAKkV,GAAKA,EACVA,GAAMlV,EAAK3R,KAEfrN,EAAK+yB,YAAYlzB,QAAQ,SAASmf,GAC9BA,EAAKsV,GAAKA,EACVA,GAAMtV,EAAK3R,OAcvB,QAASzL,GAAMlE,GACX,MAAOA,GAAEkE,MA7Rb,GAAIgxB,MACAS,EAAY,GACZU,EAAc,EACdzuB,GAAQ,EAAG,GACX2Y,KACAjX,KACAssB,GAAa,EAEb/U,EAAS,SAASoV,GAClBd,IACAG,IACAE,IACAQ,EAAkBC,IAGlBqB,EAAW,WACXH,KAIA7V,EAAO,WAGP,QAASA,GAAKjhB,GAEV,GAAIsY,GAAKtY,EAAEuL,OAAO5L,EAAIK,EAAEuL,OAAOiP,GAC3BlD,EAAKtX,EAAEjD,OAAO4C,EACdu3B,EAAK7+B,GAAG8+B,kBAAkB7e,EAAIhB,GAC9B4P,EAAKgQ,EAAGE,GACRC,EAAKH,EAAG,EAAIE,GACZxZ,EAAK5d,EAAEuL,OAAOnE,EAAIpH,EAAEm2B,GAAKn2B,EAAEsP,GAAK,EAChCmW,EAAKzlB,EAAEjD,OAAOqK,EAAIpH,EAAEu2B,GAAKv2B,EAAEsP,GAAK,EAChCgoB,EAAW,IAAMhf,EAAK,IAAMsF,EAC1B,IAAMsJ,EAAK,IAAMtJ,EACjB,IAAMyZ,EAAK,IAAM5R,EACjB,IAAMnO,EAAK,IAAMmO,CACvB,OAAO6R,GAfX,GAAIF,GAAY,EAwBhB,OANAnW,GAAKmW,UAAY,SAAS52B,GACtB,MAAKtH,WAAUe,QACfm9B,GAAa52B,EACNygB,GAFuBmW,GAK3BnW,GAIPkT,EAAS,SAASlyB,GAClB,MAAOA,GAAKmF,EAAInF,EAAKqN,GAAK,EAwQ9B,OA1BAulB,GAAOpuB,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKm8B,GAC3CA,EAAOluB,SAAWC,OAAOC,WACrByuB,WAAexuB,IAAK,WAAW,MAAOwuB,IAAevuB,IAAK,SAASvG,GAAG80B,GAAW90B,IACjFw1B,aAAelvB,IAAK,WAAW,MAAOkvB,IAAejvB,IAAK,SAASvG,GAAGw1B,EAAYx1B,IAClF0f,OAAepZ,IAAK,WAAW,MAAOoZ,IAAenZ,IAAK,SAASvG,GAAG0f,EAAM1f,IAC5EyI,OAAenC,IAAK,WAAW,MAAOmC,IAAelC,IAAK,SAASvG,GAAGyI,EAAMzI,IAC5E+G,MAAeT,IAAK,WAAW,MAAOS,IAAeR,IAAK,SAASvG,GAAG+G,EAAK/G,IAC3E+0B,YAAezuB,IAAK,WAAW,MAAOyuB,IAAexuB,IAAK,SAASvG,GAAG+0B,EAAW/0B,IAEjFggB,QAAe1Z,IAAK,WAAW0Z,EAAO,KAAezZ,IAAK,SAASvG,GAAGggB,EAAOhgB,KAC7Ey2B,UAAenwB,IAAK,WAAWmwB,KAAsBlwB,IAAK,SAASvG,MACnE2zB,QAAertB,IAAK,WAAW,MAAOqtB,MAAeptB,IAAK,SAASvG,GAC/C,kBAANA,KACN2zB,EAAO3zB,KAGfygB,MAAena,IAAK,WAAW,MAAOma,MAAela,IAAK,SAASvG,GAI/D,MAHgB,kBAANA,KACNygB,EAAKzgB,GAEFygB,QAIfppB,EAAGG,MAAMqP,YAAYwtB,GAEdA,GCrUXh9B,EAAGI,OAAOs/B,YAAc,WACpB,YAmDA,SAAS58B,GAAMsB,GAgJX,MA/IAA,GAAUC,KAAK,SAASC,GAqIpB,QAASq7B,GAASx3B,GACd3H,GAAG2G,OAAOpG,MAAM4G,KAAI,YACpB,aAAeQ,EAAEL,EAAI,KACjBK,EAAEoH,EAAI7F,KAAKL,IAAI,EAAGK,KAAKF,IAAInC,EAASc,EAAEsP,GAAIjX,GAAGuE,MAAMwK,KACnD,KACJytB,EAAOoC,WACPhW,EAAKzhB,KAAI,IAAM6rB,GAzInB,GAAIoM,IACAvX,QAEQje,KAAS,EAAG9H,KAAQ,WACpB8H,KAAS,EAAG9H,KAAQ,WACpB8H,KAAS,EAAG9H,KAAQ,WACpB8H,KAAS,EAAG9H,KAAQ,WACpB8H,KAAS,EAAG9H,KAAQ,WACpB8H,KAAS,EAAG9H,KAAQ,WAE5B8O,QAEQsC,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,OACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,OACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,OACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,OACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,MACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,MACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,MACpC0H,OAAW,EAAGxO,OAAU,EAAG8G,MAAS,OAK5C6zB,GAAc,EACdC
,GAAgB,CAmBpB,KAf8B,gBAAlBx7B,GAAW,OAAmBA,EAAW,MAAGlC,SAAW,IACrC,gBAAlBkC,GAAW,OAAmBA,EAAW,MAAGlC,SAAW,IAE/Dy9B,GAAc,GAKdv7B,EAAW,OAAMA,EAAW,MAAGlC,OAAS,GACxCkC,EAAW,OAAMA,EAAW,MAAGlC,OAAS,IAExC09B,GAAgB,IAIhBD,EAIA,MAHA19B,SAAQ49B,MAAK,2BAA6B,0BAA2Bz7B,GACrEnC,QAAQI,KAAI,yBAA2Bq9B,EAAUtrB,KAAKC,UAAUqrB,IAChEI,EAAU57B,EAAW,yCACd,CAIX,KAAI07B,EAEA,MADAE,GAAU57B,EAAW,sBACd,CAMX,IAAI8R,GAAM9R,EAAUsD,OAAM,OACrBC,KAAI,QAAUP,GACdO,KAAI,SAAWN,GACfK,OAAM,KACNC,KAAI,QAAU,8BAGnBq1B,GACKS,UAAUA,GACVU,YAAYA,GACZzuB,MAAMtI,EAAOC,GAElB,IAAImsB,GAAOwJ,EAAO5T,MAElB4T,GACK3U,MAAM/jB,EAAK+jB,OACXjX,MAAM9M,EAAK8M,OACXuX,OAAO,IACP2T,OAAOA,EAGZ,IAAIlT,GAAOlT,EAAIxO,OAAM,KAAMH,UAAS,SAC/BjD,KAAKA,EAAK8M,OACV3J,QAAQC,OAAM,QACdC,KAAI,QAAU,QACdA,KAAI,IAAM6rB,GACV/oB,MAAK,eAAiB,SAAStC,GAAK,MAAOuB,MAAKL,IAAI,EAAGlB,EAAEsP,MAC7D7B,KAAK,SAAS9F,EAAEuO,GAAK,MAAOA,GAAE5G,GAAK3H,EAAE2H,IAGtC2R,GAAK1hB,OAAM,SACNwK,KAAK+tB,EAGV,IAAI71B,GAAO8L,EAAIxO,OAAM,KAAMH,UAAS,SAC/BjD,KAAKA,EAAK+jB,OACV5gB,QAAQC,OAAM,KACdC,KAAI,QAAU,QACdA,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAeA,EAAEL,EAAI,IAAMK,EAAEoH,EAAI,MACxEnO,KACGZ,GAAG6iB,SACEC,OACA4c,OAAO,SAAS/3B,GAAK,MAAOA,KAC5BxG,GAAE,YAAc,WACbZ,KAAK0Q,WAAW0uB,YAAYp/B,QAE/BY,GAAE,OAASg+B,GAIxBv1B,GAAK1C,OAAM,QACNC,KAAI,SAAW,SAASQ,GAAK,MAAOA,GAAEsP,KACtC9P,KAAI,QAAUq1B,EAAOS,aACrBhzB,MAAK,OAAS21B,GACd31B,MAAK,SAAW41B,GAChB34B,OAAM,SACNwK,KAAKouB,GAGVl2B,EAAK1C,OAAM,QACNC,KAAI,IAAM,IACVA,KAAI,IAAM,SAASQ,GAAK,MAAOA,GAAEsP,GAAK,IACtC9P,KAAI,KAAO,SACXA,KAAI,cAAgB,OACpBA,KAAI,YAAc,MAClBuK,KAAK,SAAS/J,GAAK,MAAOA,GAAE7F,OAC5BmK,OAAO,SAAStE,GAAK,MAAOA,GAAEL,EAAIV,EAAQ,IAC1CO,KAAI,IAAM,EAAIq1B,EAAOS,aACrB91B,KAAI,cAAgB,WAatB7E,EAzLX,GAAIyC,IAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3Cw3B,EAASh9B,EAAGI,OAAO48B,SACnB51B,EAAQ,IACRC,EAAS,IACTo2B,EAAY,GACZU,EAAe,GACfoC,EAAQ,QACRjE,EAASz4B,OAOX28B,EAAehgC,GAAGmM,OAAM,QACxBA,EAAS,SAASxE,GAClB,MAAOq4B,GAAar4B,GAAK,IAAMo4B,GAE/Bj0B,EAAQ9L,GAAG8H,MAAMuI,aACjBovB,EAAY,SAAS93B,GACrB,MAAOA,GAAEuL,OAAOpR,KAAO,MAAQ6F,EAAEjD,OAAO5C,KAAO,KAAOqK,EAAOxE,EAAE6D,QAE/Do0B,EAAgB,SAASj4B,GACzB,MAAOA,GAAEmE,MAAQA,EAAMnE,EAAE7F,KAAK+P,QAAO,MAAQ,MAE7CguB,EAAkB,SAASl4B,GAC3B,MAAO3H,IAAG8xB,IAAInqB,EAAEmE,OAAOimB,OAAO,IAE9B+N,EAAY,SAASn4B,GACrB,MAAOA,GAAE7F,KAAO,KAAOqK,EAAOxE,EAAE6D,QAGhCg0B,EAAY,SAAS5Z,EAASqa,GAC9Bra,EAAQ1e,OAAM,QACTC,KAAI,IAAM,GACVA,KAAI,IAAM,GACVA,KAAI,QAAU,2BACdA,KAAI,cAAgB,UACpBuK,KAAKuuB,GAwLd,OA9BA39B,GAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpBuxB,OAAkBtxB,IAAK,WAAW,MAAOsxB,IAAerxB,IAAK,SAASvG,GAAG43B,EAAM53B,IAC/EvB,OAAkB6H,IAAK,WAAW,MAAO7H,IAAe8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAC/EtB,QAAkB4H,IAAK,WAAW,MAAO5H,IAAe6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAChFgE,QAAkBsC,IAAK,WAAW,MAAOtC,IAAeuC,IAAK,SAASvG,GAAGgE,EAAOhE,IAChFs3B,WAAkBhxB,IAAK,WAAW,MAAOgxB,IAAe/wB,IAAK,SAASvG,GAAGs3B,EAAUt3B,IACnF80B,WAAkBxuB,IAAK,WAAW,MAAOwuB,IAAevuB,IAAK,SAASvG,GAAG80B,EAAU90B,IACnFw1B,aAAkBlvB,IAAK,WAAW,MAAOkvB,IAAejvB,IAAK,SAASvG,GAAGw1B,EAAYx1B,IACrF2zB,QAAkBrtB,IAAK,WAAW,MAAOqtB,IAAeptB,IAAK,SAASvG,GAAG2zB,EAAO3zB,IAGhFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/Dk7B,WAAYzxB,IAAK,WAAW,UAAaC,IAAK,SAASvG,GACnDy3B,EAAoCv8B,SAAlB8E,EAAEg4B,UAA4Bh4B,EAAEg4B,UAAcP,EAChEC,EAAoCx8B,SAAlB8E,EAAEi4B,YAA4Bj4B,EAAEi4B,YAAcP,EAChEC,EAAoCz8B,SAAlB8E,EAAE+X,MAA4B/X,EAAE+X,MAAc4f,MAKxEtgC,EAAGG,MAAMqP,YAAY1M,GAEdA,GCxOX
9C,EAAGI,OAAOgtB,QAAU,WAChB,YA4DA,SAASyT,GAAS14B,GACd,GAAIqE,GAAK+rB,CAGT,OAFA/rB,GAAMrE,EAAE,GAAGiE,OAAS,IAAMjE,EAAE,GAC5BowB,EAAMuI,EAAOt0B,GAAOs0B,EAAOt0B,OAI/B,QAASu0B,GAAS54B,GACd,GAAIqE,EACJA,GAAMrE,EAAE,GAAGiE,OAAS,IAAMjE,EAAE,SACrB24B,GAAOt0B,GAGlB,QAASw0B,GAAS74B,GACd,GAAInF,GAAGwJ,EAAK+rB,EACR0I,EAAQJ,EAAS14B,GACjB+4B,GAAQ,CACZ,KAAKl+B,EAAI,EAAGA,EAAI3B,UAAUe,OAAQY,GAAK,EACnCwJ,EAAMnL,UAAU2B,GAChBu1B,EAAMl3B,UAAU2B,EAAI,GAAGmF,EAAE,GAAIA,EAAE,IAC3B84B,EAAMz0B,KAAS+rB,GAAQ0I,EAAME,eAAe30B,KAC5Cy0B,EAAMz0B,GAAO+rB,EACb2I,GAAQ,EAGhB,OAAOA,GAGX,QAASp+B,GAAMsB,GAwcX,MAvcAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GA2GpB,QAAS88B,KAKL,GAFAC,GAAc,GAETnf,EAAa,OAAO,CAGzB,IAAI0D,KAAe,EAAM,CACrB,GAAI0b,GAAW9gC,GAAGmf,MAAMrb,EAAKmQ,IAAI,SAASmnB,EAAO2F,GACzC,MAAO3F,GAAM/yB,OACR4L,IAAI,SAASnF,EAAO8S,GAKjB,GAAIof,GAAKvnB,EAAK3K,EAAM8S,GAChBqf,EAAK5f,EAAKvS,EAAM8S,EAEpB,QAAQpiB,EAAGG,MAAM8H,UAAUH,EAAE05B,IAAsB,KAAhB93B,KAAKyB,SAChCnL,EAAGG,MAAM8H,UAAUsH,EAAEkyB,IAAsB,KAAhB/3B,KAAKyB,SACpCo2B,EACAnf,EAAY9S,KAEnB7C,OAAO,SAASi1B,EAAYtf,GACzB,MAAO4F,GAAY0Z,EAAW,GAAItf,OAKlD,IAAuB,GAAnBkf,EAASl/B,OAAa,OAAO,CAC7Bk/B,GAASl/B,OAAS,IAElBk/B,EAAS79B,MAAMqE,EAAErB,QAAQ,GAAK,GAAI8I,EAAE9I,QAAQ,GAAK,GAAI,KAAM,OAC3D66B,EAAS79B,MAAMqE,EAAErB,QAAQ,GAAK,GAAI8I,EAAE9I,QAAQ,GAAK,GAAI,KAAM,OAC3D66B,EAAS79B,MAAMqE,EAAErB,QAAQ,GAAK,GAAI8I,EAAE9I,QAAQ,GAAK,GAAI,KAAM,OAC3D66B,EAAS79B,MAAMqE,EAAErB,QAAQ,GAAK,GAAI8I,EAAE9I,QAAQ,GAAK,GAAI,KAAM,OAK/D,IAAIk7B,GAASnhC,GAAGohC,KAAKC,UACjB,IAAI,MACJ,IAAKx6B,EAAS,KACbD,EAAQ,GAAGC,EAAS,KACpBD,EAAQ,GAAE,OAGX06B,EAAUthC,GAAGohC,KAAKE,QAAQR,GAAU7sB,IAAI,SAAStM,EAAGnF,GACpD,OACIsB,KAAQq9B,EAAOI,KAAK55B,GACpBiE,OAAUk1B,EAASt+B,GAAG,GACtBsM,MAASgyB,EAASt+B,GAAG,KAK7BsE,IAAKH,OAAM,mBAAoBI,UAAS,QAASc,QACjD,IAAI25B,GAAa16B,GAAKH,OAAM,mBAAoBI,UAAS,QAASjD,KAAKw9B,GACnEG,EAAcD,EACbv6B,QAAQC,OAAM,YACdC,KAAI,IAAM,SAASQ,GAChB,MAAKA,IAAMA,EAAE7D,MAA0B,IAAlB6D,EAAE7D,KAAKlC,OAGjB,IAAM+F,EAAE7D,KAAKsT,KAAI,KAAQ,IAFzB,UAIdjQ,KAAI,KAAO,SAASQ,EAAEnF,GACnB,MAAO,WAAWA,IACrB2E,KAAI,YAAc,SAASQ,EAAEnF,GAAK,MAAO,gBAAgBuH,EAAE,IAAKvH,EAAC,KAWtE,IAPIk/B,GACAD,EAAYx3B,MAAK,OAASjK,GAAG8xB,IAAI,IAAK,IAAK,MACtC7nB,MAAK,eAAiB,IACtBA,MAAK,iBAAmB,GACxBA,MAAK,SAAWjK,GAAG8xB,IAAI,IAAI,IAAI,MAGpC6P,EAAa,CAGb76B,GAAKH,OAAM,mBAAoBI,UAAS,KAAMc,QAC9C,IAAI+5B,GAAa96B,GAAKH,OAAM,mBAAoBI,UAAS,YAAajD,KAAKg9B,EACzDc,GACb36B,QAAQC,OAAM,gBACdC,KAAI,KAAO,SAASQ,EAAGnF,GAAK,MAAO,WAAWuH,EAAE,IAAKvH,IACrD0E,OAAM,cACNC,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAClCR,KAAI,KAAO,SAASQ,GAAK,MAAOA,GAAE,KAClCR,KAAI,IAAM06B,GAGnB,GAAIC,GAAqB,SAASC,EAAIp6B,EAAGq6B,GACrC,GAAInB,EAAa,MAAO,EACxB,IAAIj1B,GAAS9H,EAAK6D,EAAEiE,OACpB,IAAevI,SAAXuI,EAAJ,CACA,GAAIkD,GAASlD,EAAOvD,OAAOV,EAAEmH,MAC7BA,GAAY,MAAKhD,EAAMF,EAAQjE,EAAEiE,QAGjCkD,EAAQ,EAAK2K,EAAK3K,GAClBA,EAAQ,EAAKuS,EAAKvS,EAGlB,IAAIiK,GAAMrS,EAAUkD,OAAOmD,wBACvBk1B,EAAavgC,OAAOwgC,aAAer4B,SAAS0D,gBAAgB00B,UAC5DE,EAAazgC,OAAO0gC,aAAev4B,SAAS0D,gBAAgB40B,WAE5D11B,GACAzH,KAAMsC,EAAEmS,EAAK3K,EAAOnH,EAAEmH,QAAUiK,EAAI/T,KAAOm9B,EAAap9B,EAAOC,KAAO,GACtEC,IAAK8J,EAAEsS,EAAKvS,EAAOnH,EAAEmH,QAAUiK,EAAI9T,IAAMg9B,EAAYl9B,EAAOE,IAAM,GAGtE+8B,IACIlzB,MAAOA,EACPlD,OAAQA,EACRa,IAAKA,EACL41B,aAAc/6B,EAAEmS,EAAK3K,EAAOnH,EAAEmH,QAAU/J,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOnH,EAAEmH,QAAU/J,EAAOE,KACtFgf,YAAatc,EAAEiE,OACfgW,WAAYja,EAAEmH,MACdvK,MAAOvE,GAAGuE,MACVqhB,QAASmc,KAIjBP,GACKrgC,GAAE,QAAU,SAASwG,GAClBm6B,EAAmBvhC,KAAMoH,EAAGzH,EAASqG,gBAExCpF,GAAE,WAAa,SAASwG,GACrBm6B,EAAmBvhC,KAAMoH,EAAGzH,EAAS4lB,mBAExC3kB,GAAE,YAAc,SAASwG,GACtBm6B,EAAmBvhC,KAAMoH,EAAGzH,EAASsb,oBAExCra,GAAE,WAAa,SAASwG,EAAGnF,GACxBs/B,EAAmBvhC,KAAMoH,EA
AGzH,EAASsF,uBAK7CsB,IAAKH,OAAM,cAAeI,UAAS,aAC9BA,UAAS,aAGT5F,GAAE,QAAU,SAASwG,EAAEnF,GAEpB,GAAIq+B,IAAgB/8B,EAAK6D,EAAEiE,QAAS,MAAO,EAC3C,IAAIA,GAAS9H,EAAK6D,EAAEiE,QAChBkD,EAASlD,EAAOvD,OAAO7F,GACvBojB,EAAUrlB,IACdL,GAASqG,cACLuI,MAAOA,EACPlD,OAAQA,EACRa,KAAMnF,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAClEo9B,aAAc/6B,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAC1Egf,YAAatc,EAAEiE,OACfgW,WAAYpf,EACZ+B,MAAOvE,GAAGuE,MACVqhB,QAASA,MAGhBzkB,GAAE,WAAa,SAASwG,EAAEnF,GACvB,GAAIq+B,IAAgB/8B,EAAK6D,EAAEiE,QAAS,MAAO,EAC3C,IAAIA,GAAS9H,EAAK6D,EAAEiE,QAChBkD,EAASlD,EAAOvD,OAAO7F,EAE3BtC,GAAS4lB,iBACLhX,MAAOA,EACPlD,OAAQA,EACRa,KAAMnF,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAClEo9B,aAAc/6B,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAC1Egf,YAAatc,EAAEiE,OACfgW,WAAYpf,MAGnBrB,GAAE,YAAc,SAASwG,EAAEnF,GACxB,GAAIq+B,IAAgB/8B,EAAK6D,EAAEiE,QAAS,MAAO,EAC3C,IAAIA,GAAS9H,EAAK6D,EAAEiE,QAChBkD,EAASlD,EAAOvD,OAAO7F,EAE3BtC,GAASsb,kBACL1M,MAAOA,EACPlD,OAAQA,EACRa,KAAMnF,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAClEo9B,aAAc/6B,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAC1Egf,YAAatc,EAAEiE,OACfgW,WAAYpf,EACZsJ,MAAOA,EAAMnE,EAAGnF,OAGvBrB,GAAE,WAAa,SAASwG,EAAEnF,GACvB,GAAIq+B,IAAgB/8B,EAAK6D,EAAEiE,QAAS,MAAO,EAC3C,IAAIA,GAAS9H,EAAK6D,EAAEiE,QAChBkD,EAASlD,EAAOvD,OAAO7F,EAE3BtC,GAASsF,iBACLsJ,MAAOA,EACPlD,OAAQA,EACRa,KAAMnF,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAClEo9B,aAAc/6B,EAAEmS,EAAK3K,EAAOtM,IAAMuC,EAAOC,KAAM+J,EAAEsS,EAAKvS,EAAOtM,IAAMuC,EAAOE,KAC1Egf,YAAatc,EAAEiE,OACfgW,WAAYpf,EACZsJ,MAAOA,EAAMnE,EAAGnF,OAvTpCkE,EAAY1G,GAAG2G,OAAOpG,KACtB,IAAI4E,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAElEvF,GAAGG,MAAMsW,QAAQvP,GAGjB5C,EAAK2F,QAAQ,SAASmC,EAAQpJ,GAC1BoJ,EAAOvD,OAAOoB,QAAQ,SAASqF,GAC3BA,EAAMlD,OAASpJ,KAKvB,IAAI8/B,GAAWhgC,EAAMoY,SAAS5Y,OAAS9B,GAAG8H,MAAMrG,MAAMK,MAAO,GAAO,EAEhEwjB,GAAc9L,GAAWI,GAAW2oB,KACpCviC,GAAGmf,MACCrb,EAAKmQ,IAAI,SAAStM,GACd,MAAOA,GAAEU,OAAO4L,IAAI,SAAStM,EAAEnF,GAC3B,OAAS8E,EAAGmS,EAAK9R,EAAEnF,GAAIuM,EAAGsS,EAAK1Z,EAAEnF,GAAI0M,KAAMszB,EAAQ76B,EAAEnF,QAapE,IARD8E,EAAKnB,OAAOqT,GAAWxZ,GAAGkf,OAAOoG,GAAWrR,IAAI,SAAStM,GAAK,MAAOA,GAAEL,IAAMpG,OAAOke,KAEhFwB,GAAW9c,EAAK,GAChBwD,EAAErB,MAAMyT,KAAYvU,EAAiBs9B,EAAgBt9B,IAAmB,EAAGrB,EAAK,GAAGuE,OAAOzG,QAASuD,EAAiBA,GAAkB,EAAIs9B,IAAiB,EAAI3+B,EAAK,GAAGuE,OAAOzG,UAG9K0F,EAAErB,MAAMyT,IAAW,EAAGvU,IAErBm9B,EAAU,CACP,GAAIt5B,IAAMhJ,GAAGgJ,IAAIsc,GAAWrR,IAAI,SAAStM,GAAK,MAAY,KAARA,EAAEoH,EAAgBpH,EAAEoH,EAAxB,SAC9CA,GAAEgO,OAAM,GACH5W,OAAOyT,GAAW5Z,GAAGkf,OAAOoG,GAAWrR,IAAI,SAAStM,GACjD,MAAY,KAARA,EAAEoH,EAAgBpH,EAAEoH,EACN,GAAN/F,KACb9H,OAAO4f,KACT7a,MAAM0U,IAAWvV,EAAiB,QAEnC2J,GAAE5I,OAAOyT,GAAW5Z,GAAGkf,OAAOoG,GAAWrR,IAAI,SAAUtM,GAAK,MAAOA,GAAEoH,IAAK7N,OAAO4f,KAChF7a,MAAM0U,IAAWvV,EAAiB,GAG/Cs9B,GAAKv8B,OAAOo8B,GAAcviC,GAAGkf,OAAOoG,GAAWrR,IAAI,SAAStM,GAAK,MAAOA,GAAEuH,OAAQhO,OAAOyhC,KACpF18B,MAAM28B,GAAaC,GAGxBpe,EAAcnd,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,IAAM4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,GAE1EmB,EAAEnB,SAAS,KAAOmB,EAAEnB,SAAS,KAC7BmB,EAAEnB,SAAS,GACPmB,EAAEnB,QAAQmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,GAAWmB,EAAEnB,SAAS,GAAqB,IAAhBmB,EAAEnB,SAAS,KACzEmB,EAAEnB,QAAM,GAAK,KAEnB4I,EAAE5I,SAAS,KAAO4I,EAAE5I,SAAS,KAC7B4I,EAAE5I,SAAS,GACP4I,EAAE5I,QAAQ4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,GAAW4I,EAAE5I,SAAS,GAAqB,IAAhB4I,EAAE5I,SAAS,KACzE4I,EAAE5I,QAAM,GA
AK,KAElBwJ,MAAMrI,EAAEnB,SAAS,KAClBmB,EAAEnB,QAAM,GAAK,IAGZwJ,MAAMZ,EAAE5I,SAAS,KAClB4I,EAAE5I,QAAM,GAAK,IAGjB8Z,EAAKA,GAAM3Y,EACXie,EAAKA,GAAMxW,EACX+zB,EAAKA,GAAMJ,CAEX,IAAIK,IAAYz7B,EAAE,KAAO2Y,EAAG,IAAMlR,EAAE,KAAOwW,EAAG,IAAMmd,EAAE,KAAOI,EAAG,EAEhEE,GAASA,GAAUp8B,EACnBq8B,EAAUA,GAAWp8B,CAErB,IAAIq8B,IAAWF,IAAWp8B,GAASq8B,IAAYp8B,EAG3CC,GAAOJ,EAAUK,UAAS,wBAAyBjD,MAAMA,IACzDkD,GAAYF,GAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,oCAAsC4C,GACzFkT,GAAYjW,GAAUE,OAAM,QAC5B8V,GAAShW,GAAUE,OAAM,KACzByQ,GAAI7Q,GAAKH,OAAM,IAEnBG,IAAKoD,QAAO,kBAAoBua,GAChCzH,GAAO9V,OAAM,KAAMC,KAAI,QAAU,aACjC6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,kBACjCH,GAAUE,OAAM,KAAMC,KAAI,QAAU,kBAEpCL,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEvEgY,GAAU/V,OAAM,YACXC,KAAI,KAAO,gBAAkB4C,GAC7B7C,OAAM,QACNC,KAAI,YAAc,wBAEvBL,GAAKH,OAAM,iBAAoBoD,EAAK,SAC/B5C,KAAI,QAAUhC,EAAiB,IAC/BgC,KAAI,SAAY/B,EAAkB,EAAKA,EAAkB,GAAK,GAEnEuS,GAAExQ,KAAI,YAAc8Z,EAAW,qBAAuBlX,EAAK,IAAM,IAqNjE82B,GAAc,CACd,IAAIpb,IAAS3e,GAAKH,OAAM,cAAeI,UAAS,aAC3CjD,KAAK,SAAS6D,GAAK,MAAOA,IAAK,SAASA,GAAK,MAAOA,GAAEqE,KAC3DyZ,IAAOxe,QAAQC,OAAM,KAChB+C,MAAK,iBAAmB,MACxBA,MAAK,eAAiB,MAC3Bwb,GAAO7d,OACFC,SACL4d,GACKte,KAAI,QAAU,SAASQ,EAAEnF,GACtB,OAAQmF,EAAEuC,SAAW,IAAM,uBAAyB1H,IAEvD0H,QAAO,qBAAuBwX,GAC9BxX,QAAO,QAAU,SAASvC,GAAK,MAAOA,GAAEsT,QAC7CwK,GAAOzT,gBAAgBC,EAAa,mBAC/BhI,MAAK,OAAS,SAAStC,EAAEnF,GAAK,MAAOsJ,GAAMnE,EAAGnF,KAC9CyH,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOmF,GAAEw7B,kBAAoBA,GAAoBr3B,EAAMnE,EAAGnF,KAC1FyH,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,GAG3B,IAAIm5B,IAAS3d,GAAO1e,UAAS,iBACxBjD,KAAK,SAAS6D,GACX,MAAOA,GAAEU,OAAO4L,IACZ,SAAUnF,EAAO8S,GACb,OAAQ9S,EAAO8S,KAChB3V,OACC,SAASi1B,EAAYtf,GACjB,MAAO4F,GAAY0Z,EAAW,GAAItf,MAyCtD,IAtCAwhB,GAAOn8B,QAAQC,OAAM,QAChBC,KAAI,QAAU,SAAUQ,GACrB,MAAO,qBAAuBA,EAAE,KAEnCsC,MAAK,OAAS,SAAUtC,GAAK,MAAOA,GAAEmE,QACtC7B,MAAK,SAAW,SAAUtC,GAAK,MAAOA,GAAEmE,QACxC3E,KAAI,YAAc,SAASQ,GACxB,MAAO,aAAenI,EAAGG,MAAM8H,UAAUwY,EAAGxG,EAAK9R,EAAE,GAAGA,EAAE,MAAQ,IAAMnI,EAAGG,MAAM8H,UAAU8d,EAAGlE,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAEnHR,KAAI,IACD3H,EAAGG,MAAM4V,SACRrQ,KAAK,SAASyC,GAAK,MAAO07B,GAAS17B,EAAE,MACrCuH,KAAK,SAASvH,GAAK,MAAO+6B,GAAEF,EAAQ76B,EAAE,GAAGA,EAAE,QAEpDy7B,GAAOx7B,OAAO/D,KAAK08B,GAAU14B,SAC7B4d,GAAO7d,OAAOb,UAAS,iBAClBiL,gBAAgBC,EAAa,gBAC7B9K,KAAI,YAAc,SAASQ,GACxB,MAAO,aAAenI,EAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAE,GAAGA,EAAE,MAAQ,IAAMnI,EAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAEjHE,SAELu7B,GAAOn3B,OAAO,SAAUtE,GAAK,MAAOo7B,KAAaG,IAAY1C,EAAS74B,EAAG,IAAK8R,EAAM,IAAK4H,KACpFrP,gBAAgBC,EAAa,kBAC7B9K,KAAI,YAAc,SAASQ,GAExB,MAAO,aAAenI,EAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAE,GAAGA,EAAE,MAAQ,IAAMnI,EAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAGtHy7B,GAAOn3B,OAAO,SAAUtE,GAAK,MAAOo7B,KAAaG,IAAY1C,EAAS74B,EAAG,QAAS07B,EAAU,OAAQb,KAC/FxwB,gBAAgBC,EAAa,kBAC7B9K,KAAI,IACD3H,EAAGG,MAAM4V,SACRrQ,KAAK,SAASyC,GAAK,MAAO07B,GAAS17B,EAAE,MACrCuH,KAAK,SAASvH,GAAK,MAAO+6B,GAAEF,EAAQ76B,EAAE,GAAGA,EAAE,QAIjDszB,EACH,CACI,GAAIqI,IAAU7d,GAAO1e,UAAS,aACzBjD,KAAK,SAAS6D,GACX,MAAOA,GAAEU,OAAO4L,IACZ,SAAUnF,EAAO8S,GACb,OAAQ9S,EAAO8S,KAChB3V,OACC,SAASi1B,EAAYtf,GACjB,MAAO4F,GAAY0Z,EAAW,GAAItf,MAItD0hB,IAAOr8B,QAAQC,OAAM,QAChB+C,MAAK,OAAS,SAAUtC,EAAEnF,GACvB,MAAOmF,GAAEmE,QACZ7B,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,GACtB9C,KAAI,YAAc,SAASQ,GACxB,GAAIwa,GAAK3iB,EAAGG,MAAM8H,UAAUwY,EAAGxG,EAAK9R,EAAE,GAAGA,EAAE,MAAQuB,KAAK2f,KAAK6Z,EAAEF,EAAQ76B,EAAE,GAAGA,EAAE,KAAKuB,KAAK+P,IAAM,CAC9F,OAAO,aAAekJ,EAAK,IAAM3iB,EAAGG,MAAM8H,UAAU8d,EAAGlE,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAE9E+J,KAAK,SAAS/J,EAAEnF,GACb,MAAOmF,GAAE,GAAGkU,QAEpBynB,GAAO17B,OAAOC,SACd4d,GAAO7d,OAAOb,UAAS,iBAClB
iL,gBAAgBC,EAAa,gBAC7B9K,KAAI,YAAc,SAASQ,GACxB,GAAIwa,GAAK3iB,EAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAE,GAAGA,EAAE,MAAOuB,KAAK2f,KAAK6Z,EAAEF,EAAQ76B,EAAE,GAAGA,EAAE,KAAKuB,KAAK+P,IAAI,CAC1F,OAAO,aAAekJ,EAAK,IAAM3iB,EAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAE7EE,SACNy7B,GAAOz/B,KAAK,SAAS8D,GAClB3H,GAAG2G,OAAOpG,MACP2J,QAAO,YAAa,GACpBA,QAAO,YAAevC,EAAE,IAAI,GAC5BuC,QAAO,SAAS,KAErBo5B,GAAOtxB,gBAAgBC,EAAa,kBAC/B9K,KAAI,YAAc,SAASQ,GACxB,GAAIwa,GAAK3iB,EAAGG,MAAM8H,UAAUH,EAAEmS,EAAK9R,EAAE,GAAGA,EAAE,MAAOuB,KAAK2f,KAAK6Z,EAAEF,EAAQ76B,EAAE,GAAGA,EAAE,KAAKuB,KAAK+P,IAAI,CAC1F,OAAO,aAAekJ,EAAK,IAAM3iB,EAAGG,MAAM8H,UAAUsH,EAAEsS,EAAK1Z,EAAE,GAAGA,EAAE,MAAQ,MAKlF47B,GAEAC,aAAaC,GACbA,EAAY5gC,WAAW+9B,EAAwB2C,IAI/C3C,IAIJ3gB,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,OACPwpB,EAAKJ,EAAEppB,OAEP0pB,EAASp8B,EACTq8B,EAAUp8B,IAGdoL,EAAYS,UAAS,qBACdpQ,EA1hBX,GA4CI2d,GAAIsF,EAAIud,EACNE,EACAC,EACAQ,EA/CF1+B,GAAgBE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GACjD4B,EAAe,KACfC,EAAe,KACfiF,EAAetM,EAAGG,MAAMuQ,eACxBizB,EAAmB,KACnBp5B,EAAeb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UAC/BjE,EAAe,KACfY,EAAetH,GAAG8H,MAAMC,SACxBgH,EAAe/O,GAAG8H,MAAMC,SACxB26B,EAAe1iC,GAAG8H,MAAMC,SACxB0R,EAAe,SAAS9R,GAAK,MAAOA,GAAEL,GACtC+Z,EAAe,SAAS1Z,GAAK,MAAOA,GAAEoH,GACtCyzB,EAAe,SAAS76B,GAAK,MAAOA,GAAEuH,MAAQ,GAC9Cm0B,EAAe,SAAS17B,GAAK,MAAOA,GAAE+7B,OAAS,UAC/CtkB,KACA0B,KACA6hB,KACAjhB,GAAe,EACf8F,EAAe,SAAS7f,GAAK,OAAQA,EAAEg8B,WACvC/iB,GAAe,EACf6hB,EAAe,GACfxhB,GAAe,EACf0gB,GAAe,EACfD,GAAe,EACfG,EAAe,WAAa,MAAO,KACnCroB,EAAe,KACfI,EAAe,KACfF,EAAe,KACfiB,EAAe,KACf4nB,EAAe,KACfK,EAAe,KACfne,GAAe,EACfvkB,EAAeF,GAAGE,SAAQ,eAAiB,kBAAmB,mBAAoB,kBAAmB,aACrGklB,GAAe,EACfnd,EAAe,IACfs7B,EAAyB,IACzBtI,GAAgB,EAYhB4F,GAAc,EACd5uB,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,GAC7C46B,GAAkB,GAAI,KACtBvC,IAskBN,OAxFAh+B,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAG1CA,EAAMkS,OAAS,GAAI,YACfjU,KAAKuhB,gBAAkB,WAInB,MAHAtiB,GAAGO,IAAIqD,MAAM,WACTsD,EAAUK,UAAS,mBAAoBmD,QAAO,SAAU,KAErD,MAEX3J,KAAKohB,eAAiB,SAAUsC,EAAarC,EAAYC,GACrDriB,EAAGO,IAAIqD,MAAM,WACTsD,EAAUC,OAAM,cACbI,UAAS,cAAiBkd,GAC1Bld,UAAS,aAAgB6a,GACzB1X,QAAO,QAAU2X,OAMhC3hB,EAASiB,GAAE,yBAA2B,SAASwG,GACvC+Z,GAAapf,EAAMkS,OAAOmN,eAAeha,EAAEsc,YAAYtc,EAAEia,YAAW,KAG5E1hB,EAASiB,GAAE,wBAA0B,SAASwG,GACtC+Z,GAAapf,EAAMkS,OAAOmN,eAAeha,EAAEsc,YAAYtc,EAAEia,YAAW,KAG5Etf,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAe6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACtEtB,QAAe4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACxEvC,QAAe6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IAC9DuS,QAAejM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IAC9Dy7B,YAAen1B,IAAK,WAAW,MAAOi0B,IAAKh0B,IAAK,SAASvG,GAAGu6B,EAAEv6B,IAC9DqR,SAAe/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IAC1EyR,SAAenL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IAC1EolB,aAAe9e,IAAK,WAAW,MAAO8zB,IAAc7zB,IAAK,SAASvG,GAAGo6B,EAAWp6B,IAChFuR,QAAejL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IACxEwS,QAAelM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IACxE07B,YAAep1B,IAAK,WAAW,MAAOm0B,IAAal0B,IAAK,SAASvG,GAAGy6B,EAAUz6B,IAC9EiX,QAAe3Q,IAAK,WAAW,MAAO2Q,IAAU1Q,IAAK,SAASvG,GAAGiX,EAAOjX,IACxE2Y,QAAerS,IAAK,WAAW,MAAOqS,IAAUpS,IAAK,SAASvG,GAAG2Y,EAAO3Y,IACxE27B,YAAer1B,IAAK,WAAW,MAAOk0B,IAAaj0B,IAAK,SAASvG,GAAGw6B,EAAUx6B,IAC9EuZ,aAAejT,IAAK,WAAW,MAAOiT,IAAehT,IAAK,SAASvG,GAAGuZ,EAAYvZ,IAClFqf,aAAe/Y,IAAK,WAAW,MAAO+Y,IAAe9Y,IAAK,SAASvG,GAAGqf,EAAYrf,IAClFs6B,cAAeh0B,IAAK,WAAW,MAAOg0B,IAAgB/zB,IAAK,SAASvG,GAAGs6B,EAAat6B,IACpFyY,SAAenS,IAAK,WAAW,MAAOmS,IAAWlS,IAAK,SAASvG,GAAGyY,EAAQzY,IAC1E8Y,UAAexS,IAAK,WAAW,MAAOwS,IA
AYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IAC5Ew5B,aAAelzB,IAAK,WAAW,MAAOkzB,IAAejzB,IAAK,SAASvG,GAAGw5B,EAAYx5B,IAClF05B,YAAepzB,IAAK,WAAW,MAAOozB,IAAcnzB,IAAK,SAASvG,GAAG05B,EAAW15B,IAChFu5B,aAAgBjzB,IAAK,WAAW,MAAOizB,IAAehzB,IAAK,SAASvG,GAAGu5B,EAAYv5B,IACnF4B,IAAe0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAChEo7B,wBAAyB90B,IAAI,WAAW,MAAO80B,IAA0B70B,IAAK,SAASvG,GAAGo7B,EAAuBp7B,IACjH8yB,YAAaxsB,IAAK,WAAW,MAAOwsB,IAAcvsB,IAAK,SAASvG,GAAI8yB,EAAa9yB,IACjFg7B,kBAAmB10B,IAAK,WAAW,MAAO00B,IAAoBz0B,IAAK,SAASvG,GAAGg7B,EAAiBh7B,IAGhGb,GAAQmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAOzZ,GAAG4V,QAAQzN,KAC1E4G,GAAQN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAOrhB,GAAG4V,QAAQzN,KAC1EmlB,WAAY7e,IAAK,WAAW,MAAO+zB,IAAW9zB,IAAK,SAASvG,GAAGq6B,EAAUxiC,GAAG4V,QAAQzN,KACpF47B,YAAat1B,IAAK,WAAW,MAAO40B,IAAY30B,IAAK,SAASvG,GAAGk7B,EAAWrjC,GAAG4V,QAAQzN,KAGvFpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,KAEtB6D,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAClD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9Bid,YAAa3W,IAAK,WAAW,MAAO2W,IAAc1W,IAAK,SAASvG,GAC5Did,EAAajd,EACTid,KAAe,IACfuc,GAAc,OAK1BniC,EAAGG,MAAMqP,YAAY1M,GACdA,GChoBX9C,EAAGI,OAAOokC,aAAe,WACrB,YA6EA,SAAS1hC,GAAMsB,GA+PX,MA9PAqO,GAAYW,QACZX,EAAYrS,OAAOgtB,GACflQ,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAC9BonB,GAAWhyB,EAAYrS,OAAOskC,GAC9BC,GAAWlyB,EAAYrS,OAAOwkC,GAElCxgC,EAAUC,KAAK,SAASC,GAGpB4C,EAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAkBlE,IAhBAzC,EAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAK0B,GAEfoE,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAEvDA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAG9E,MAFApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GACvBuL,EAAYS,UAAS,qBACdpQ,CAEPoE,GAAUK,UAAS,cAAec,SAItCP,EAAIslB,EAAQhnB,SACZmJ,EAAI6d,EAAQlS,QAGZ,IAAI5T,GAAOJ,EAAUK,UAAS,6BAA8BjD,MAAMA,IAC9DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,yCAA2CylB,EAAQ7iB,MACtGiT,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAkBnB,IAfAqW,EAAO9V,OAAM,QAASC,KAAI,QAAU,sBAAsB8C,MAAK,iBAAgB,QAE/E+S,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,0BACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,iBAE7B+V,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAItDke,EAEE,CACH,GAAI6H,GAAc/lB,CAClBme,GAAO1c,MAAMskB,GAEbpkB,EAAKH,OAAM,kBACNyW,MAAMtZ,GACNlD,KAAK0iB,GAELC,GAAaD,EAAOzc,WAAa9B,EAAOE,MACzCF,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAGlE+B,EAAKH,OAAM,kBACNQ,KAAI,YAAc,gBAAwBpC,EAAOE,IAAK,SAf3D0S,GAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,QAkB9Cf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAGvE2nB,EACKhmB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GAEvB,MADAmF,GAAEmE,MAAQnE,EAAEmE,OAASA,EAAMnE,EAAGnF,GACvBmF,EAAEmE,QACVG,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,YACzC4d,WAAWA
,GAEhBn0B,EAAKH,OAAM,mBACNyW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,YAC1Czc,KAAKgsB,GAGV9lB,EAAKH,OAAM,2BACNQ,KAAI,YAAc,qBAAuBylB,EAAQ7iB,KAAO,IAE7D,IAAIs6B,GAAUv9B,EAAKH,OAAM,2BAA4BI,UAAS,gBACzDjD,KAAK,SAAU6D,GACZ,MAAOA,IAGf08B,GAAQp9B,QAAQC,OAAM,KAAMC,KAAI,QAAU,cAE1C,IAAIm9B,GAAUD,EAAQt9B,UAAS,eAC1BjD,KAAK,SAAU6D,GACZ,OAAQA,IAGhB28B,GAAQr9B,QACHC,OAAM,QAASC,KAAI,QAAU,cAC7B8C,MAAK,iBAAmB,GAG7Bq6B,EAAQr4B,OAAO,SAAStE,GACpB,MAAOA,GAAE48B,WAAa58B,EAAE68B,QAEvBxyB,gBAAgBC,EAAa,iCAC7B9K,KAAI,KAAOG,EAAErB,QAAQ,IACrBkB,KAAI,KAAOG,EAAErB,QAAQ,IACrBkB,KAAI,KAAO,SAAUQ,EAAGnF,GACrB,MAAOuM,GAAEzH,EAAEnB,SAAS,GAAKwB,EAAE68B,MAAQ78B,EAAE48B,aAExCp9B,KAAI,KAAO,SAAUQ,EAAGnF,GACrB,MAAOuM,GAAEzH,EAAEnB,SAAS,GAAKwB,EAAE68B,MAAQ78B,EAAE48B,aAExCt6B,MAAK,SAAW,SAAUtC,EAAGnF,EAAGwY,GAC7B,MAAOlP,GAAMnE,EAAGqT,KAEnB/Q,MAAK,iBAAmB,SAAUtC,EAAGnF,GAClC,MAAQmF,GAAE0V,UAA+B,mBAAZ1V,GAAE68B,OAAgD,mBAAhB78B,GAAE48B,UAA6B,EAAI,IAItG7nB,IACAC,EACK7U,MAAMR,GACNid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAWlY,EAAkB,GAElCuS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KAClDrF,KAAK+b,IAGVC,IACAC,EACK/U,MAAMiH,GACNwV,OAAQ/kB,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,IAChDwZ,UAAWnY,EAAgB,GAEhCwS,EAAEhR,OAAM,iBACH/F,KAAKic,IAIVonB,IACAC,EACK1d,QAAQoG,EAAQtlB,KAChBQ,MAAMR,GACNV,MAAMzB,GACN2G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,YAC9CL,EAAOrW,OAAM,gBAAiBO,OAAM,KAC/BC,KAAI,QAAU,oBACnBwQ,EAAEhR,OAAM,qBACHQ,KAAI,YAAc,eAAiB4H,EAAE9I,QAAQ,GAAK,KAClDmX,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,YAC1Czc,KAAKsjC,IAGVC,IACAC,EACK5d,QAAQoG,EAAQ7d,KAChBjH,MAAMiH,GACNnI,MAAMxB,GACN0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,YAC9CL,EAAOrW,OAAM,gBAAiBO,OAAM,KAC/BC,KAAI,QAAU,oBACnBwQ,EAAEhR,OAAM,qBACHQ,KAAI,YAAc,cAAgB+V,EAAkB/X,GAAkBi/B,EAAMl1B,QAAW,OACvFkO,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,YAC1Czc,KAAKwjC,IAOd9gB,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAIVzT,EAASiB,GAAE,cAAgB,SAASC,GACN,mBAAfA,GAAEic,WACTvZ,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAEjC+O,EAAM8L,SAAWjc,EAAEic,UAEvB/a,EAAMqR,WAIViZ,EAAQ1sB,SAASiB,GAAE,0BAA4B,SAASoc,GACpD7d,EAAQgG,QAAO,GACfgB,EAAUC,OAAM,aAAgBimB,EAAQ7iB,KAAO,eAAiBwT,EAAI0G,YAAc,cAAgB1G,EAAIqE,YACjGza,KAAI,KAAO,GAChBT,EAAUC,OAAM,aAAgBimB,EAAQ7iB,KAAO,eAAiBwT,EAAI0G,YAAc,cAAgB1G,EAAIqE,YACjGza,KAAI,KAAOi9B,EAAMl1B,UAG1B0d,EAAQ1sB,SAASiB,GAAE,2BAA6B,SAASoc,GACrD7W,EAAUC,OAAM,cAAiB4W,EAAI0G,YAAc,cAAgB1G,EAAIqE,YAClEza,KAAI,KAAOoW,EAAI8kB,YAAY,GAAKj9B,GACrCsB,EAAUC,OAAM,cAAiB4W,EAAI0G,YAAc,cAAgB1G,EAAIqE,YAClEza,KAAI,KAAOoW,EAAI8kB,YAAY,GAAK6B,EAAMh1B,QAC3CxP,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAI7Bua,EAAK3Y,EAAEgS,OACPiM,EAAKxW,EAAEuK,SAIXrH,EAAYS,UAAS,+BACdpQ,EAtUX,GAAIsqB,GAAeptB,EAAGI,OAAOgtB,UACvBjQ,EAAend,EAAGI,OAAO8X,OACzBmF,EAAerd,EAAGI,OAAO8X,OACzB4L,EAAe9jB,EAAGI,OAAO0jB,SACzB4gB,EAAe1kC,EAAGI,OAAOwmB,eACzBge,EAAe5kC,EAAGI,OAAOwmB,eACzB1mB,EAAeF,EAAGI,OAAOF,UAG3BqF,GAAgBE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IACpDue,EAAY,KACZ3c,EAAe,KACfC,EAAe,KACfH,EAAe,KACfoF,EAAetM,EAAGG,MAAMuQ,eACxB5I,EAAeslB,EAAQhnB,SACvBmJ,EAAe6d,EAAQlS,SACvBupB,GAAe,EACfE,GAAe,EACf9gB,GAAe,EACf3G,GAAe,EACfE,GAAe,EACfM,GAAkB,EAClB3L,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfziB,EAAWF,GAAGE,SAAQ,cAAgB,cAAe,aACrDqW,EAAe,KACftO,EAAW,IACXgzB,GAAgB,CAGtBrO,GAAQhnB,OAAO0B,GAAGoT,OAAO3L,GACzB4N,EAAM9E,OAAM,UAAWW,YAAY,IACnCqE,EACKhF,OAAM,EAAqB,QAAU,QACrCW
,YAAY,IAEjB0rB,EAAMxsB,KAAI,KACV0sB,EAAM1sB,KAAI,KACVhY,EACKwL,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAEhCyI,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,IAOrC,IAAIyd,GAAIsF,EACFtT,EAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,GAE/Cya,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,cAKjDoF,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACSlO,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,MA6ThD,OAjDAF,GAAMpC,SAAWA,EACjBoC,EAAMsqB,QAAUA,EAChBtqB,EAAMghB,OAASA,EACfhhB,EAAMqa,MAAQA,EACdra,EAAMua,MAAQA,EACdva,EAAM4hC,MAAQA,EACd5hC,EAAM8hC,MAAQA,EACd9hC,EAAM5C,QAAUA,EAEhB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAC1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEzB,WAAa+H,IAAK,WAAW,MAAO/H,IAAagI,IAAK,SAASvG,GAAGzB,EAAUyB,IAC5E87B,WAAax1B,IAAK,WAAW,MAAOw1B,IAAav1B,IAAK,SAASvG,GAAG87B,EAAU97B,IAC5Eg8B,WAAa11B,IAAK,WAAW,MAAO01B,IAAaz1B,IAAK,SAASvG,GAAGg8B,EAAUh8B,IAC5Ekb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9EuU,WAAajO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAC5EyU,WAAanO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC5Ewa,cAAmBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACxFoO,QAAa9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACtEF,UAAawG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GAAGF,EAASE,IAC1E8yB,YAAaxsB,IAAK,WAAW,MAAOwsB,IAAcvsB,IAAK,SAASvG,GAAG8yB,EAAW9yB,IAG9EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DkY,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQ,EAAM,QAAU,UAElC/L,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAClD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,GACbo4B,EAAMp4B,MAAMA,GACZs4B,EAAMt4B,MAAMA,OAIpBtM,EAAGG,MAAMkW,eAAevT,EAAOsqB,GAC/BptB,EAAGG,MAAMqP,YAAY1M,GACdA,GCtYX9C,EAAGI,OAAO6kC,UAAY,WAClB,YA+BA,SAASniC,GAAMsB,GAkEX,MAjEAqO,GAAYW,QACZhP,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,GAGjBY,EAAKnB,OAAOqT,GAAWxZ,GAAGkf,OAAOpb,EAAM2V,IAClCxT,MAAMyT,IAAW,EAAGvU,IAEzB4J,EAAK5I,OAAOyT,GAAW5Z,GAAGkf,OAAOpb,EAAMud,IAClCpb,MAAM0U,IAAWvV,EAAiB,GAGvC,IAAI0B,GAAOJ,EAAUK,UAAS,0BAA2BjD,MAAMA,IAC3DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,4BAC1CH,GAAUE,OAAM,KACrBJ,EAAKH,OAAM,IAEnBG,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAEvE,IAAIy/B,GAAQ59B,EAAKC,UAAS,QACrBjD,KAAK,SAAS6D,GAAK,OAAQA,IAChC+8B,GAAMz9B,QAAQC,OAAM,QACpBw9B,EAAM98B,OAAOC,SACb68B,EACKz6B,MAAK,SAAW,SAAStC,EAAEnF,GAAK,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC3D2E,KAAI,IAAMnH,GAAG0V,IAAIlO,OACbF,EAAE,SAASK,EAAEnF,GAAK,MAAO8E,GAAEmS,EAAK9R,EAAEnF,MAClCuM,EAAE,SAASpH,EAAEnF,GAAK,MAAOuM,GAAEsS,EAAK1Z,EAAEnF,MAI3C,IAAI4gC,GAASt8B,EAAKC,UAAS,mBACtBjD,KAAK,SAASA,GAEX,QAAS8d,GAAWhZ,GAChB,GAAa,IAATA,EAAa,CACb,GAAI+7B,GAAS7gC,EAAK8E,EAElB,OADA+7B,GAAO/iB,WAAahZ,EACb+7B,EAEP,MAAO,MAPf,GAAIC,GAAU9gC,EAAKmQ,IAAI,SAAStM,EAAGnF,GAAK,MAAO6e,GAAK1Z,EAAEnF,KAUlDqiC,EAAWjjB,EAAWgjB,EAAQE,YAAY/1B,EAAE5I,SAAS,KACrD4+B,EAAWnjB,EAAWgjB,EAAQjyB,QAAQ5D,EAAE5I,SAAS,KACjD6+B,EAAepjB,EAAWgjB,EAAQhjC,OAAS,EAC/C,QAASqjC,EAAmBF,EAAW,KAAQE,EAAmBJ,EAAW,KAAQK,EAAmBF,EAAe,MAAO/4B,OAAO,SAAUtE,GAAI,MAAY,OAALA,KAElKy7
B,GAAOn8B,QAAQC,OAAM,UACrBk8B,EAAOx7B,OAAOC,SACdu7B,EACKj8B,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAO8E,GAAEmS,EAAK9R,EAAEA,EAAEia,eAC7Cza,KAAI,KAAO,SAASQ,EAAEnF,GAAK,MAAOuM,GAAEsS,EAAK1Z,EAAEA,EAAEia,eAC7Cza,KAAI,IAAM,GACVA,KAAI,QAAU,SAASQ,EAAEnF,GACtB,MAAOiX,GAAK9R,EAAGA,EAAEia,aAAeta,EAAEnB,SAAS,GAAK,2BACxCkb,EAAK1Z,EAAGA,EAAEia,aAAe7S,EAAE5I,SAAS,GAAK,uBAAyB,2BAItF8L,EAAYS,UAAS,uBACdpQ,EA3FX,GAUMkX,GACAI,EACAF,EACAiB,EAbF5V,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,GACTH,EAAY,KACZy+B,GAAU,EACV79B,EAAItH,GAAG8H,MAAMC,SACbgH,EAAI/O,GAAG8H,MAAMC,SACb0R,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9BjD,EAAQtM,EAAGG,MAAMsQ,UAAQ,SAKzBg1B,GAAmB,EACnBC,GAAmB,EACnBhlC,EAAWF,GAAGE,SAAQ,aAOxB+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EA6GvC,OAlCAoC,GAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAmB6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IAC1EtB,QAAmB4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC5EqR,SAAmB/K,IAAK,WAAW,MAAO+K,IAAW9K,IAAK,SAASvG,GAAGqR,EAAQrR,IAC9EyR,SAAmBnL,IAAK,WAAW,MAAOmL,IAAWlL,IAAK,SAASvG,GAAGyR,EAAQzR,IAC9EuR,QAAmBjL,IAAK,WAAW,MAAOiL,IAAUhL,IAAK,SAASvG,GAAGuR,EAAOvR,IAC5EwS,QAAmBlM,IAAK,WAAW,MAAOkM,IAAUjM,IAAK,SAASvG,GAAGwS,EAAOxS,IAC5EvC,QAAmB6I,IAAK,WAAW,MAAOnH,IAAKoH,IAAK,SAASvG,GAAGb,EAAEa,IAClEuS,QAAmBjM,IAAK,WAAW,MAAOM,IAAKL,IAAK,SAASvG,GAAG4G,EAAE5G,IAClEg9B,SAAmB12B,IAAK,WAAW,MAAO02B,IAAWz2B,IAAK,SAASvG,GAAGg9B,EAAQh9B,IAC9E88B,kBAAmBx2B,IAAK,WAAW,MAAOw2B,IAAoBv2B,IAAK,SAASvG,GAAG88B,EAAiB98B,IAChG+8B,kBAAmBz2B,IAAK,WAAW,MAAOy2B,IAAoBx2B,IAAK,SAASvG,GAAG+8B,EAAiB/8B,IAGhGb,GAAImH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAKzZ,GAAG4V,QAAQzN,KACpE4G,GAAIN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAKrhB,GAAG4V,QAAQzN,KAGpEpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,OAIlC7F,EAAMpC,SAAWA,EACjBV,EAAGG,MAAMqP,YAAY1M,GACdA,GC3IX9C,EAAGI,OAAOwlC,cAAgB,WACtB,YA8BA,SAAS9iC,GAAMsB,GAmJX,MAlJAqO,GAAYW,QACZX,EAAYrS,OAAO6kC,GACnB7gC,EAAUC,KAAK,SAASC,GAwEpB,QAASuhC,KACL,IAAIC,EAAJ,CAEA,GAAIC,GAAa5tB,EAAE5Q,UAAS,kBAAmBjD,KAAK8E,GAEhD48B,EAAaD,EAAWt+B,QACvBC,OAAM,KAAMC,KAAI,QAAU,iBAC1B8C,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,EAE3Bs7B,GAAW39B,OACN+F,aAAa1F,SAAS,KACtBgC,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,GACtBpC,SAEL09B,EACKp+B,KAAI,YAAc,SAASQ,GAAK,MAAO,aAAeL,EAAEm9B,EAAUn9B,IAAIxD,EAAK6D,GAAGA,IAAM,QACpFgG,aAAa1F,SAAS,KACtBgC,MAAK,iBAAmB,GACxBA,MAAK,eAAiB,GAEtBrB,EAAMhH,SAEX4jC,EAAWt+B,OAAM,QACZC,KAAI,KAAO,GACXA,KAAI,MAAQpC,EAAOE,KACnBkC,KAAI,KAAO,GACXA,KAAI,KAAO/B;AvChIhC,AuCkIgBogC,EvClId,AuCkIyBt+B,CvClIxB,KAAK,CAAC,AuCkIwB,CvClIvB,CAAC,EAAE,CAAC,GuCkI4BC,EvClIvB,CAAC,EAAE,AuCkIwB,CvClIvB,GAAG,CAAC,EAAE,CAAC,AuCkI0B,OvClInB,CAAC,KuCmIjBA,IvCnI0B,CuCmItB,IAAM,IACVA,KAAI,KAAOpC,EAAOE,KAClBkC,KAAI,cAAgB,OACpBA,KAAI,KAAO,QAEhBwQ,EAAEhR,OAAM,6BACH+K,KAAKgW,EAAY+c,EAAUn9B,IAAIxD,EAAK8E,EAAM,IAAKA,EAAM,MAE1D48B,EAAWt+B,OAAM,QAASC,KAAI,QAAU,aACnCA,KAAI,IAAM,GACVA,KAAI,KAAOpC,EAAOE,KAClBkC,KAAI,cAAgB,SACpBA,KAAI,KAAO,QAEhBwQ,EAAEhR,OAAM,6BACH+K,KAAKiW,EAAY8c,EAAU11B,IAAIjL,EAAK8E,EAAM,IAAKA,EAAM,QAG9D,QAAS68B,KAKL,QAASC,GAAgB5hC,EAAMwD,GAG3B,IAAK,GAFDuD,GAAW3B,KAAKC,IAAIs7B,EAAUn9B,IAAIxD,EAAK,GAAI,GAAKwD,GAChDq+B,EAAe,EACVnjC,EAAI,EAAGA,EAAIsB,EAAKlC,OAAQY,IACzB0G,KAAKC,IAAIs7B,E
AAUn9B,IAAIxD,EAAKtB,GAAIA,GAAK8E,GAAKuD,IAC1CA,EAAW3B,KAAKC,IAAIs7B,EAAUn9B,IAAIxD,EAAKtB,GAAIA,GAAK8E,GAChDq+B,EAAenjC,EAGvB,OAAOmjC,GAbX,IAAIL,EAAJ,CAEA,GAAI74B,GAAMzM,GAAGiE,MAAM1D,MAAM,GAAKwE,EAAOC,IAcrC4D,IAAS88B,EAAgB5hC,EAAMoF,KAAK6E,MAAMzG,EAAElB,OAAOqG,MACnD44B,KA1IJ,GAAI3+B,GAAY1G,GAAG2G,OAAOpG,KAC1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAMlE,IAJAzC,EAAMqR,OAAS,WAAajN,EAAU9F,KAAK0B,IAC3CA,EAAMoE,UAAYnG,MAGbuD,IAASA,EAAKlC,OAEf,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,QAGtC,IAAIiB,GAAe27B,EAAU11B,IAAIjL,EAAKA,EAAKlC,OAAO,GAAIkC,EAAKlC,OAAO,EAGlE0F,GAAIm9B,EAAU7+B,SACdmJ,EAAI01B,EAAU/pB,QAGd,IAAI5T,GAAOJ,EAAUK,UAAS,8BAA+BjD,MAAMA,IAC/DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,iCACnD6V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,oBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,gBAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,IAGvE,IAAI2gC,GAAgBjuB,EAAEhR,OAAM,oBAK5B,IAHA89B,EAAU79B,MAAMzB,GAAgB0B,OAAOzB,GACvCwgC,EAAchlC,KAAK6jC,GAEfoB,EAAe,CACf,GAAIC,GAAYnuB,EAAEhR,OAAM,iBACpB6E,EAAQs6B,EAAU/+B,UAAS,oBAC1BjD,MAAMgF,GAEX0C,GAAMvE,QAAQC,OAAM,QAASC,KAAI,QAAU,mBACtCA,KAAI,KAAO4+B,EAAkB,GAAK,GAClC5+B,KAAI,KAAO,QACX8C,MAAK,cAAgB87B,EAAkB,MAAQ,SAEpDv6B,EACKrE,KAAI,IAAMhC,GAAkB4gC,EAAkBhhC,EAAOuR,MAAQ,IAC7DnP,KAAI,IAAM6+B,EAAa,SAAUr+B,GAC9B,MAAOoH,GAAEpH,IACT,GACHsC,MAAK,OAASw6B,EAAU34B,QAAQhI,EAAKA,EAAKlC,OAAS,GAAIkC,EAAKlC,OAAS,IACrE8P,KAAKiW,EAAY7e,IAG1BkU,EAAOrW,OAAM,iBAAkBO,OAAM,QAChC/F,GAAE,YAAcskC,GAChBtkC,GAAE,QAAU,WAAamkC,GAAUA,IACnCnkC,GAAE,WAAa,WAAayH,KAAYy8B,MAE7C1tB,EAAEhR,OAAM,sBACHQ,KAAI,YAAc,SAASQ,GAAK,MAAO,cAAgB5C,EAAOC,KAAO,KAAOD,EAAOE,IAAM,MACzFkC,KAAI,QAAUhC,EAAiBJ,EAAOC,KAAOD,EAAOuR,OACpDnP,KAAI,SAAW/B,EAAkBL,EAAOE,OA0EjDgN,EAAYS,UAAS,2BACdpQ,EA3KX,GAKMgF,GACAyH,EANF01B,EAAYjlC,EAAGI,OAAO6kC,YAEtB1/B,GAAUE,IAAK,GAAIqR,MAAO,IAAKD,OAAQ,GAAIrR,KAAM,IAC/C4B,EAAQ,KACRC,EAAS,KAGT+B,KACA08B,GAAS,EACT5d,EAAc1nB,GAAGmM,OAAM,MACvBwb,EAAc3nB,GAAGmM,OAAM,QACvB05B,GAAgB,EAChBG,GAAa,EACbD,GAAkB,EAClBxvB,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,aAOxB+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EAyLvC,OA5BAoC,GAAMpC,SAAWA,EACjBoC,EAAMmiC,UAAYA,EAElBniC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAkB6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACzEtB,QAAkB4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IAC3Euf,aAAkBjZ,IAAK,WAAW,MAAOiZ,IAAehZ,IAAK,SAASvG,GAAGuf,EAAYvf,IACrFwf,aAAkBlZ,IAAK,WAAW,MAAOkZ,IAAejZ,IAAK,SAASvG,GAAGwf,EAAYxf,IACrF09B,eAAkBp3B,IAAK,WAAW,MAAOo3B,IAAiBn3B,IAAK,SAASvG,GAAG09B,EAAc19B,IACzF69B,YAAkBv3B,IAAK,WAAW,MAAOu3B,IAAct3B,IAAK,SAASvG,GAAG69B,EAAW79B,IACnF49B,iBAAkBt3B,IAAK,WAAW,MAAOs3B,IAAmBr3B,IAAK,SAASvG,GAAG49B,EAAgB59B,IAC7FoO,QAAkB9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAG3EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,SAInExF,EAAGG,MAAMkW,eAAevT,EAAOmiC,GAC/BjlC,EAAGG,MAAMqP,YAAY1M,GAEdA,GCtNX9C,EAAGI,OAAO81B,YAAc,WACpB,YA8CA,SAASpzB,GAAMsB,GA8LX,MA7LAqO,GAAYW,QACZX,EAAYrS,OAAOgtB,GACnBhpB,EAAUC,KAAK,SAASC,GACpB,GAAIqB,GAAiByB,EAAQ7B,EAAOC,KAAOD,EAAOuR,MAC9ClR,EAAkByB,EAAS9B,EAAOE,IAAMF,EAAOsR,MAEnD3P,GAAY1G,GAAG2G,OAAOpG,MACtBf,EAAGG,MAAMsW,QAAQvP,GAGjBY,EAAIslB,EAAQhnB,SACZmJ,EAAI6d,EAAQl
S,QAEZ,IAAIurB,GAAUniC,CAEdA,GAAK2F,QAAQ,SAASy8B,EAAS1jC,GAC3B0jC,EAAQjiB,YAAczhB,EACtB0jC,EAAQ79B,OAAS69B,EAAQ79B,OAAO4L,IAAI,SAAStM,EAAGqT,GAG5C,MAFArT,GAAEiB,MAAQoS,EACVrT,EAAEsc,YAAczhB,EACTmF,KAIf,IAAIw+B,GAAeriC,EAAKmI,OAAO,SAASL,GACpC,OAAQA,EAAOyR,UAGnBvZ,GAAO9D,GAAGmoB,OAAOiJ,QACZgV,MAAMA,GACNv3B,OAAOA,GACPxG,OAAO,SAASV,GAAK,MAAOA,GAAEU,SAC9Bf,EAAEmS,GACF1K,EAAEsS,GACFglB,IAAI,SAAS1+B,EAAG4d,EAAIxW,GACjBpH,EAAEoc,SACEhV,EAAGA,EACHwW,GAAIA,KAGf4gB,EAGD,IAAIr/B,GAAOJ,EAAUK,UAAS,4BAA6BjD,MAAMA,IAC7DkD,EAAYF,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,+BACnD8V,EAAYjW,EAAUE,OAAM,QAC5B8V,EAAShW,EAAUE,OAAM,KACzByQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,eACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBAEjCL,EAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAIxC,GAA3B2nB,EAAQ9L,SAASlf,QACjBgrB,EAAQ9L,SAAS7d,KAAK,GAG1B2pB,EACKhmB,MAAMzB,GACN0B,OAAOzB,GACPkC,EAAEmS,GACF1K,EAAE,SAASpH,GACR,MAAkBtE,UAAdsE,EAAEoc,QAAgCpc,EAAEoc,QAAQhV,EAAIpH,EAAEoc,QAAQwB,GAA9D,SAEHzZ,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GAEvB,MADAmF,GAAEmE,MAAQnE,EAAEmE,OAASA,EAAMnE,EAAGA,EAAEsc,aACzBtc,EAAEmE,QAGjB,IAAI+gB,GAAclV,EAAEhR,OAAM,mBACrByW,MAAMtZ,EAEX+oB,GAAYjsB,KAAKgsB,GAEjB3P,EAAU/V,OAAM,YACXC,KAAI,KAAO,gBAAkB4C,GAC7B7C,OAAM,QAEXJ,EAAKH,OAAM,iBAAoBoD,EAAK,SAC/B5C,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpBuS,EAAExQ,KAAI,YAAc8Z,EAAW,qBAAuBlX,EAAK,IAAM,GAEjE,IAAImjB,GAAOltB,GAAG0V,IAAIwX,OACbC,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAM,MAAO8E,GAAEmS,EAAK9R,EAAEnF,MACnC+iB,GAAG,SAAS5d,GACT,MAAOoH,GAAEpH,EAAEoc,QAAQwB,MAEtB6H,GAAG,SAASzlB,GACT,MAAOoH,GAAEpH,EAAEoc,QAAQhV,EAAIpH,EAAEoc,QAAQwB,MAEpCkC,YAAYA,GAEb6e,EAAWtmC,GAAG0V,IAAIwX,OACjBC,QAAQA,GACR7lB,EAAE,SAASK,EAAEnF,GAAM,MAAO8E,GAAEmS,EAAK9R,EAAEnF,MACnC+iB,GAAG,SAAS5d,GAAK,MAAOoH,GAAEpH,EAAEoc,QAAQwB,MACpC6H,GAAG,SAASzlB,GAAK,MAAOoH,GAAEpH,EAAEoc,QAAQwB,MAErCyN,EAAOrb,EAAEhR,OAAM,gBAAiBI,UAAS,gBACxCjD,KAAK,SAAS6D,GAAK,MAAOA,IAE/BqrB,GAAK/rB,QAAQC,OAAM,QAASC,KAAI,QAAU,SAASQ,EAAEnF,GAAK,MAAO,mBAAqBA,IACjF2E,KAAI,IAAM,SAASQ,EAAEnF,GAClB,MAAO8jC,GAAS3+B,EAAEU,OAAQV,EAAEsc,eAE/B9iB,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASqmC,eACLz3B,MAAOnH,EACPiE,OAAQjE,EAAEqE,IACVS,KAAMzM,GAAGuE,MAAMiiC,MAAOxmC,GAAGuE,MAAMkiC,OAC/BxiB,YAAatc,EAAEsc,gBAGtB9iB,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASwmC,cACL53B,MAAOnH,EACPiE,OAAQjE,EAAEqE,IACVS,KAAMzM,GAAGuE,MAAMiiC,MAAOxmC,GAAGuE,MAAMkiC,OAC/BxiB,YAAatc,EAAEsc,gBAGtB9iB,GAAE,QAAU,SAASwG,EAAEnF,GACpBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GACjChK,EAASymC,WACL73B,MAAOnH,EACPiE,OAAQjE,EAAEqE,IACVS,KAAMzM,GAAGuE,MAAMiiC,MAAOxmC,GAAGuE,MAAMkiC,OAC/BxiB,YAAatc,EAAEsc,gBAI3B+O,EAAKprB,OAAOC,SACZmrB,EAAK/oB,MAAK,OAAS,SAAStC,EAAEnF,GACtB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGA,EAAEsc,eAEhCha,MAAK,SAAW,SAAStC,EAAEnF,GAAI,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGA,EAAEsc,eACjE+O,EAAKhhB,gBAAgBC,EAAW,oBAC3B9K,KAAI,IAAM,SAASQ,EAAEnF,GAClB,MAAO0qB,GAAKvlB,EAAEU,OAAO7F,KAO7BoqB,EAAQ1sB,SAASiB,GAAE,wBAA0B,SAASC,GAClDuW,EAAEhR,OAAM,aAAgBoD,EAAK,aAAe3I,EAAE6iB,aAAa/Z,QAAO,SAAU,KAEhF0iB,EAAQ1sB,SAASiB,GAAE,uBAAyB,SAASC,GACjDuW,EAAEhR,OAAM,aAAgBoD,EAAK,aAAe3I,EAAE6iB,aAAa/Z,QAAO,SAAU,KAIhF5H,EAAMskC,8BAAgC,SAASC,GAC3C,GAEIrkC,GACAwY,EACA8rB,EAJA/0B,EAAI80B,EAAUjlC,OACd4Q,EAAIq0B,EAAU,GAAGjlC,OAIjB2jB,IAEJ,KAAKvK,EAAI,EAAOxI,EAAJwI,IAASA,EAAG,CACpB,IAAKxY,EAAI,EAAGskC,EAAI,EAAGtkC,EAAIyjC,EAAQrkC,OAAQY,IACnCskC,GAAKzlB,EAAK4kB,EAAQzjC,GAAG6F,OAAO2S,GAGhC,IAAI8rB,EAAG,IAAKtkC,EAAI,EAAOuP,EAAJvP,EAAOA,IACtBqkC,EAAUrkC,GAAGwY,GAAG,IAAM8rB,MAEtB,KAAKtkC,EAAI,EAAOuP,EAAJvP,EAAOA,IACfqkC,EAAUrkC,GAAGwY,GAAG,GAAK,EAIjC,IAAKA,EAAI,EAAOxI
,EAAJwI,IAASA,EAAGuK,EAAGvK,GAAK,CAChC,OAAOuK,MAKftT,EAAYS,UAAS,yBACdpQ,EAtOX,GAcMgF,GACAyH,EAfFhK,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTiF,EAAQtM,EAAGG,MAAMuQ,eACjBnG,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZ+S,EAAO,SAAS9R,GAAK,MAAOA,GAAEL,GAC9B+Z,EAAO,SAAS1Z,GAAK,MAAOA,GAAEoH,GAC9Boe,EAAU,SAASxlB,EAAEnF,GAAK,OAAQmN,MAAM0R,EAAK1Z,EAAEnF,KAAqB,OAAd6e,EAAK1Z,EAAEnF,IAC7DyH,EAAQ,QACR4E,EAAS,OACTu3B,EAAQ,UACR3e,EAAc,SACdxG,GAAW,EAGX2L,EAAUptB,EAAGI,OAAOgtB,UACpB3kB,EAAW,IACX/H,EAAYF,GAAGE,SAAQ,YAAc,gBAAiB,eAAa,YAAe,eAAgB,mBAAoB,kBAG5H0sB,GACKU,UAAU,KACVC,aAAa,IAAK,KAevB,IAAItb,GAAczS,EAAGG,MAAMsS,YAAY/R,EAAU+H,EA2RjD,OApFA3F,GAAMpC,SAAWA,EACjBoC,EAAMsqB,QAAUA,EAEhBA,EAAQ1sB,SAASiB,GAAE,eAAiB,WAAYjB,EAASqG,aAAatF,MAAMV,KAAMM,aAClF+rB,EAAQ1sB,SAASiB,GAAE,mBAAqB,WAAYjB,EAASsb,iBAAiBva,MAAMV,KAAMM,aAC1F+rB,EAAQ1sB,SAASiB,GAAE,kBAAoB,WAAYjB,EAASsF,gBAAgBvE,MAAMV,KAAMM,aAExFyB,EAAMmlB,YAAc,SAAStf,GACzB,MAAKtH,WAAUe,QACf6lB,EAActf,EACP7F,GAFuBmlB,GAKlCnlB,EAAM2F,SAAW,SAASE,GACtB,MAAKtH,WAAUe,QACfqG,EAAWE,EACX8J,EAAYW,MAAM3K,GAClB2kB,EAAQ3kB,SAASA,GACV3F,GAJuB2F,GAOlC3F,EAAMpC,SAAWA,EACjBoC,EAAMsqB,QAAUA,EAChBtqB,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEglB,SAAU1e,IAAK,WAAW,MAAO0e,IAAWze,IAAK,SAASvG,GAAGglB,EAAQhlB,IACrE8Y,UAAWxS,IAAK,WAAW,MAAOwS,IAAYvS,IAAK,SAASvG,GAAG8Y,EAAS9Y,IACxE0G,QAAcJ,IAAK,WAAW,MAAOI,IAAUH,IAAK,SAASvG,GAAG0G,EAAO1G,IACvEi+B,OAAW33B,IAAK,WAAW,MAAO23B,IAAS13B,IAAK,SAASvG,GAAGi+B,EAAMj+B,IAClEsf,aAAiBhZ,IAAK,WAAW,MAAOgZ,IAAe/Y,IAAK,SAASvG,GAAGsf,EAAYtf,IAGpFb,GAAQmH,IAAK,WAAW,MAAOgL,IAAQ/K,IAAK,SAASvG,GAAGsR,EAAOzZ,GAAG4V,QAAQzN,KAC1E4G,GAAQN,IAAK,WAAW,MAAO4S,IAAQ3S,IAAK,SAASvG,GAAGkZ,EAAOrhB,GAAG4V,QAAQzN,KAG1EpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/D8G,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,KAE9B8B,OAAQwE,IAAK,WAAW,MAAOxE,IAASyE,IAAK,SAASvG,GAElD,OADA8B,EAAQ9B,GAEJ,IAAK,QACD7F,EAAMuM,OAAM,QACZvM,EAAM8jC,MAAK,UACX,MACJ,KAAK,SACD9jC,EAAMuM,OAAM,UACZvM,EAAM8jC,MAAK,aACX,MACJ,KAAK,gBACD9jC,EAAMuM,OAAM,cACZvM,EAAM8jC,MAAK,aACX,MACJ,KAAK,SACD9jC,EAAMuM,OAAM,UACZvM,EAAM8jC,MAAK,UACX,MACJ,KAAK,gBACD9jC,EAAMuM,OAAOvM,EAAMskC,+BACnBtkC,EAAM8jC,MAAK,cAIvBn+B,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClB2kB,EAAQ3kB,SAASA,OAIzBzI,EAAGG,MAAMkW,eAAevT,EAAOsqB,GAC/BptB,EAAGG,MAAMqP,YAAY1M,GAEdA,GCxUX9C,EAAGI,OAAOmnC,iBAAmB,WACzB,YAiGA,SAASzkC,GAAMsB,GA2cX,MA1cAqO,GAAYW,QACZX,EAAYrS,OAAOsxB,GACfxU,GAAWzK,EAAYrS,OAAO+c,GAC9BC,GAAW3K,EAAYrS,OAAOid,GAElCjZ,EAAUC,KAAK,SAASC,GAsMpB,QAAS2pB,KACF/Q,GACC/E,EAAEhR,OAAM,2BACHQ,KAAI,YAAc,eAAiB/B,EAAkB,KACrDuI,aACA1F,SAASA,GACTrH,KAAK+b,GAKlB,QAAS+Q,KACL,GAAG9Q,EAAW,CACV,GAAwB,WAApBsU,EAAQjnB,SAA4C,kBAApBinB,EAAQjnB,QAA6B,CACrE,GAAI+8B,GAAgBnqB,EAAM7E,YAEpBivB,IAAkBD,IAAkBE,IACtCD,EAAiBD,GAGrBnqB,EAAM7E,WAAWkvB,OAGbD,KACApqB,EAAM7E,WAAWivB,GACjBA,EAAiB,KAIzBtvB,GAAEhR,OAAM,2BACPgH,aAAa1F,SAAS,GACtBrH,KAAKic,IAqMd,QAASqK,GAAQhI,GAEb,GAAIioB,GAAcxvB,EAAEhR,OAAM,6BACrByW,MACDtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAC/BpJ,IAAI,SAAStM,EAAEnF,GACZ,OACIwJ,IAAKrE,EAAEqE,IACPkhB,KAAMvlB,EAAEulB,KACRhjB,QAASvC,EAAEuC,QACX7B,OA
AQV,EAAEU,OAAO4D,OAAO,SAAStE,EAAEnF,GAC/B,MAAO0uB,GAAQ5pB,IAAIK,EAAEnF,IAAM0c,EAAO,IAAMgS,EAAQ5pB,IAAIK,EAAEnF,IAAM0c,EAAO,KAEvE0O,eAAgBjmB,EAAEimB,kBAIlCuZ,GAAYx5B,aAAa1F,SAASA,GAAUrH,KAAKswB,GAGjDzD,IACAC,IA9bJ,GAAIhnB,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,EAa5G,IAXAvE,EAAMqR,OAAS,WAAajN,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAC3EA,EAAMoE,UAAYnG,KAElBgR,EACKmC,OAAO+O,EAAY3e,GAAOxB,EAAMqR,QAChCH,OAAOkP,EAAY5e,IACnB6P,SAGLpC,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,YAE9CsF,EAAc,CACf,GAAI3W,EACJ2W,KACA,KAAK3W,IAAOuF,GACJA,EAAMvF,YAAgBtL,OACtBiiB,EAAa3W,GAAOuF,EAAMvF,GAAKrL,MAAM,GAErCgiB,EAAa3W,GAAOuF,EAAMvF,GAKtC,KAAKlI,GAASA,EAAKlC,QAAWkC,EAAKmI,OAAO,SAAStE,GAAK,MAAOA,GAAEU,OAAOzG,SAAUA,QAE9E,MADApC,GAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,CAEPoE,GAAUK,UAAS,cAAec,SAGtCP,EAAI4pB,EAAQtrB,SACZmJ,EAAImiB,EAAQxW,QAGZ,IAAI5T,GAAOJ,EAAUK,UAAS,iCAAkCjD,MAAMA,IAClEkZ,EAASlW,EAAKG,QAAQC,OAAM,KAAMC,KAAI,QAAU,oCAAoCD,OAAM,KAC1FyQ,EAAI7Q,EAAKH,OAAM,IAEnBqW,GAAO9V,OAAM,KAAMC,KAAI,QAAU,iBACjC6V,EAAO9V,OAAM,KAAMC,KAAI,QAAU,kBAEjC,IAAI2mB,GAAa9Q,EAAO9V,OAAM,KAAMC,KAAI,QAAU,WAClD2mB,GAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBAAiBD,OAAM,QAC5D4mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,gBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,kBACrC2mB,EAAW5mB,OAAM,KAAMC,KAAI,QAAU,iBAIlB6V,GAAO9V,OAAM,KAAMC,KAAI,QAAU,eAGpD,IAAKkc,EAEE,CACH,GAAI6H,GAAe1H,GAAmC,QAAnBuK,EAA4B5oB,EAAiBgtB,EAAehtB,CAK/F,IAHAme,EAAO1c,MAAMskB,GACbvT,EAAEhR,OAAM,kBAAmByW,MAAMtZ,GAAMlD,KAAK0iB,GAErB,WAAnByK,EAA6B,CAEhC,GAAIqZ,IAAe1qB,EAAY,GAAK,GAAK,EACtC3X,GAAOsR,OAASnN,KAAKL,IAAIya,EAAOzc,SAAWugC,EAAariC,EAAOsR,QAC/DjR,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,EAC3G,IAAIwgC,GAAYjiC,EAAkBgiC,CAC/BzvB,GAAEhR,OAAM,kBACHQ,KAAI,YAAc,eAAiBkgC,EAAU,SACxB,QAAnBtZ,IACFxK,GAAaxe,EAAOE,KAAOqe,EAAOzc,WACnC9B,EAAOE,IAAMqe,EAAOzc,SACpBzB,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,IAG5G8Q,EAAEhR,OAAM,kBACNQ,KAAI,YAAc,cAAgBhC,EAAe+lB,GAAe,KAAQnmB,EAAOE,IAAK,UAtB1F0S,GAAEhR,OAAM,kBAAmBI,UAAS,KAAMc,QA2B9C,IAAK2b,EAEE,CACH,GAAIC,MAEIzX,IAAKomB,EAAclB,SAAW,UAC9BoW,QAAS,UACTjqB,SAA6B,SAAnB6T,EAAQjnB,QAClBA,MAAO,UAGP+B,IAAKomB,EAAcmV,QAAU,SAC7BD,QAAS,SACTjqB,SAA6B,UAAnB6T,EAAQjnB,QAClBA,MAAO,WAGP+B,IAAKomB,EAAc5I,UAAY,WAC/B8d,QAAS,WACTjqB,SAA6B,UAAnB6T,EAAQjnB,QAClBA,MAAO,WAGP+B,IAAKomB,EAAcoV,eAAiB,UACpCF,QAAS,gBACTjqB,SAA6B,iBAAnB6T,EAAQjnB,QAClBA,MAAO,iBAIfkoB,GAAgBsV,EAAe7lC,OAAO,EAAK,IAC3C6hB,GAAeA,GAAaxX,OAAO,SAAStE,GACxC,MAA6C,KAAtC8/B,EAAe90B,QAAQhL,EAAE2/B,WAGpC5jB,EACK9c,MAAOurB,GACPrmB,OAAK,OAAU,OAAQ,SAE5B6L,EAAEhR,OAAM,oBACHyW,MAAMqG,IACN7iB,KAAK8iB,EAEV,IAAIgkB,IAAcx+B,KAAKL,IAAI6a,EAAS7c,SAAUwc,GAAkC,QAAnB0K,EAA4BzK,EAAOzc,SAAW,EAEtG9B,GAAOE,KAAOyiC,KACf3iC,EAAOE,IAAMyiC,GACbtiC,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,IAAW8oB,EAAcpH,EAAM5f,SAAW,IAG5G8Q,EAAEhR,OAAM,oBACHQ,KAAI,YAAc,gBAAmBpC,EAAOE,IAAK,SAlDrD0S,GAAEhR,OAAM,oBAAqBI,UAAS,KAAMc,QAqDjDf,GAAKK,KAAI,YAAc,aAAepC,EAAOC,KAAO,IAAMD,EAAOE,IAAM,KAEnEiY,GACAvF,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,aAAehC,EAAiB,OAIvD0e,IACAC,EACKld,MAAMzB,GACN0B,OAAOzB,GACPL,QAAQC,KAAMD,EAAOC,KAAMC,IAAKF,EAAOE,MACvCmC,aAAaV,GACbd,OAAO0B,GACZR,EAAKH,OAAM,mBAAoB/F,KAAKkjB,IAGxCnM,EAAEhR,OAAM,iCACHQ,KAAI,QAAUhC,GACdgC,KAAI,SAAW/B,GAEpB8rB,EACKtqB,MAAMzB,GACN0B,OAAOzB,GACP0G,MAAMhI,EAAKmQ,IAAI,SAAStM,EAAEnF,GACvB,MAAOmF,GAAEmE,OAASA,EAAMnE,EAAGnF,KAC5ByJ,OAAO,SAAStE,EAAEnF,GAAK,OAAQsB,EAAKtB,GAAG6a,WAE9C,IAAI8pB,IAAcxvB,EAAEhR,
OAAM,6BACrByW,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAS/C,IANIX,GACAC,EAAM7U,MAAMR,GACPid,OAAQ/kB,EAAGG,MAAMuU,WAAW/O,EAAe,IAAKrB,IAChDwZ,UAAWlY,EAAiB,GAGjCwX,EAAW,CACX,GAAIhF,GAEAA,IADqB,WAArBsZ,EAAQriB,SACA,EAGArP,EAAGG,MAAM2U,WAAWlP,EAAgB,GAAItB,GAEpD+Y,EAAM/U,MAAMiH,GACPwV,OAAO3M,IACP0F,UAAUnY,EAAgB,GA4CnC,GAAI0oB,EAIG,CACHpH,EAAM7f,MAAMzB,GACZwS,EAAEhR,OAAM,iBACHQ,KAAI,YAAc,gBAAmB/B,EAAkBL,EAAOsR,OAASoQ,EAAM1hB,SAASE,KAAO,KAC7FmY,MAAMtZ,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,YAC1Czc,KAAK6lB,EACV,IAAIvH,IAASuH,EAAMG,MAAMC,QAAUJ,EAAMjN,UAAYiN,EAAMG,MAAM1H,QACnD,QAAXA,IACCgI,EAAQhI,QAXZioB,IAAYx5B,aAAa/M,KAAKswB,GAC9BzD,IACAC,GAiBJwD,GAAQhxB,SAASiB,GAAE,mBAAqB,SAASC,GACkB,IAA3D0C,EAAKmI,OAAO,SAAStE,GAAK,OAAQA,EAAE0V,WAAYzb,OAChDkC,EAAK2F,QAAQ,SAAS9B,GAClBA,EAAE0V,UAAW,IAGjBvZ,EAAK2F,QAAQ,SAAS9B,EAAEnF,GACpBmF,EAAE0V,SAAY7a,GAAKpB,EAAE6iB,cAG7B1S,EAAM8L,SAAWvZ,EAAKmQ,IAAI,SAAStM,GAAK,QAASA,EAAE0V,WACnDnd,EAASoiB,YAAY/Q,GAErBjP,EAAMqR,WAGV2P,EAAOpjB,SAASiB,GAAE,cAAgB,SAASqjB,GACvC,IAAK,GAAIxY,KAAOwY,GACZjT,EAAMvF,GAAOwY,EAASxY,EAC1B9L,GAASoiB,YAAY/Q,GACrBjP,EAAMqR,WAGV+P,EAASxjB,SAASiB,GAAE,cAAgB,SAASwG,EAAEnF,GACtCmF,EAAE0V,WAEPoG,GAAeA,GAAaxP,IAAI,SAASwB,GAErC,MADAA,GAAE4H,UAAW,EACN5H,IAEX9N,EAAE0V,UAAW,EAEb6T,EAAQjnB,MAAMtC,EAAEsC,OAGhBsH,EAAMtH,MAAQinB,EAAQjnB,QACtB/J,EAASoiB,YAAY/Q,GAErBjP,EAAMqR,YAGVmQ,EAAiB5jB,SAASiB,GAAE,mBAAqB,SAASC,GACtD8vB,EAAQpP,iBACR,IAAI2C,GAAa7C,EAAY8C,EAAgBC,KAAcgjB,EAAW,EAAGC,GAAgB,CAmCzF,IAlCA9jC,EACKmI,OAAO,SAASL,EAAQpJ,GAErB,MADAoJ,GAAOqY,YAAczhB,GACboJ,EAAOyR,WAElB5T,QAAQ,SAASmC,EAAOpJ,GACrBof,EAAapiB,EAAG4I,kBAAkBwD,EAAOvD,OAAQjH,EAAE0E,YAAaxD,EAAMgF,IACtE,IAAIwH,GAAQlD,EAAOvD,OAAOuZ,GACtBqM,EAAc3rB,EAAMyM,IAAID,EAAO8S,EAInC,IAHmB,MAAfqM,GACAiD,EAAQvP,eAAenf,EAAGof,GAAY,GAErB,mBAAV9S,GAAX,CAC2B,mBAAhB2V,KAA6BA,EAAc3V,GACxB,mBAAnB4V,KAAgCA,EAAiBpiB,EAAMsD,SAAStD,EAAMgF,IAAIwH,EAAM8S,IAG3F,IAAIimB,GAAmC,UAAnB3W,EAAQjnB,QAAuB6E,EAAMiV,QAAQhV,EAAIzM,EAAMyM,IAAID,EAAM8S,EACrF+C,GAAQ1hB,MACJ+I,IAAKJ,EAAOI,IACZR,MAAOq8B,EACP/7B,MAAOA,EAAMF,EAAOA,EAAOqY,aAC3BnV,MAAOA,IAGPg5B,GAAyC,UAAnB5W,EAAQjnB,SAAuC,MAAhB49B,IACvDF,GAAYE,EACZD,GAAgB,MAI1BjjB,EAAQ7N,UAGJ6N,EAAQ/iB,OAAS,EAAG,CACpB,GAAIgjB,GAAStiB,EAAMoY,SAAStU,OAAOhF,EAAE+C,QACZqF,EAAmB,IAC5Cmb,GAAQlb,QAAQ,SAASmC,EAAOpJ,GAI5BoiB,EAAS1b,KAAKC,IAAIyb,EAClB,IAAImjB,GAAY7+B,KAAKC,IAAIyC,EAAOkD,MAAMiV,QAAQwB,IAC1CyiB,EAAW9+B,KAAKC,IAAIyC,EAAOkD,MAAMiV,QAAQhV,EAC7C,OAAK6V,IAAUmjB,GAAwBC,EAAWD,GAAtBnjB,OAExBpb,EAAmBhH,GAFvB,SAMoB,MAApBgH,IACAmb,EAAQnb,GAAkBqC,WAAY,GAI1Ci8B,GAAyC,UAAnB5W,EAAQjnB,SAAuB0a,EAAQ/iB,QAAU,IAAMgmC,GAC7EjjB,EAAQ1hB,MACJ+I,IAAKi8B,EACLz8B,MAAOm8B,EACP57B,OAAO,GAIf,IAAI+Y,GAASxiB,EAAMgF,IAAImd,EAAY7C,GAE/B3W,EAAiB6Y,EAAiBpkB,QAAQuL,gBAEtB,YAApBimB,EAAQjnB,SAA4C,kBAApBinB,EAAQjnB,SAClCi+B,IACFA,EAAoBj9B,GAGxBA,EAAiBjL,GAAGmM,OAAM,QAGtB+7B,IACAj9B,EAAiBi9B,EACjBA,EAAoB,MAI5BpkB,EAAiBpkB,QACZuL,eAAeA,GACfnH,MAEG0H,MAAOsZ,EACPlZ,OAAQ+Y,MAIhBb,EAAiBre,gBAAgBif,KAIrCZ,EAAiB5jB,SAASiB,GAAE,kBAAmB,SAASC,GACpD8vB,EAAQpP,oBAIZ2E,EAAMvmB,SAASiB,GAAE,UAAY,SAAS+d,GAClCgI,EAAQhI,KAIZhf,EAASiB,GAAE,cAAgB,SAASC,GAEN,mBAAfA,GAAEic,UAA4BvZ,EAAKlC,SAAWR,EAAEic,SAASzb,SAChEkC,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,SAAWjc,EAAEic,SAAS7a,KAGjC+O,EAAM8L,SAAWjc,EAAEic,UAGA,mBAAZjc,GAAE6I,QACTinB,EAAQjnB,MAAM7I,EAAE6I,OAChBA,EAAQ7I,EAAE6I,OAGd3H,EAAMqR,aAiCd1B,EAAYS,UAAS,gCACdpQ,EAtiBX,GAyBMgF,GACAyH,EA1BFmiB,EAAU1xB,EAAGI,OAAO81B,cAClB/Y,EAAQnd,EAAGI,OAAO8X,OAClBmF,EAAQrd,EAAGI,OAAO8X,OAClB4L,EAAS9jB,EAAGI,OAAO0jB,SACnBI,EAAWlkB,EAAGI,OAAO0jB,SACrBQ,EAAmBtkB,EAAGkE,uBACtBhE,EAAUF,EAAGI,OAAOF,UACpB+mB,EAAQjnB,EAA
GI,OAAO6mB,MAAMjnB,EAAGI,OAAO81B,eAGpC3wB,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9Cue,EAAY,KACZ3c,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMuQ,eACjBsT,GAAe,EACfH,GAAa,EACb0K,EAAiB,MACjBrR,GAAY,EACZE,GAAY,EACZM,GAAkB,EAClB2Q,GAAc,EACdhK,GAA0B,EAC1BikB,GAAqB,EACrBG,EAAa,QAGb12B,EAAQ/R,EAAGG,MAAM4R,QACjBoR,EAAe,KACfpM,EAAS,KACTrW,EAAWF,GAAGE,SAAQ,cAAgB,cAAY,aAClDiyB,EAAe,IACfsV,GAAiB,UAAS,SAAS,YACnCrV,KACAnqB,EAAW,GAGjBsJ,GAAMtH,MAAQinB,EAAQjnB,QACtB0S,EAAM9E,OAAM,UAAWW,YAAY,GACnCqE,EAAMhF,OAAM,EAAqB,QAAU,QAE3CnY,EACKwL,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAEhCyI,eAAe,SAAStD,EAAGnF,GACxB,MAAOqa,GAAM7E,aAAarQ,EAAGnF,KAGrCshB,EAAiBpkB,QACZwL,gBAAgB,SAASvD,EAAGnF,GACzB,MAAOma,GAAM3E,aAAarQ,EAAGnF,KAEhCyI,eAAe,SAAStD,EAAGnF,GACxB,MAAY,OAALmF,EAAY,MAAQkV,EAAM7E,aAAarQ,EAAGnF,IAGzD,IAAIykC,GAAiB,KACjBiB,EAAoB,IAExBxkB,GAASyB,aAAY,EAMrB,IAAIlT,GAAczS,EAAGG,MAAMsS,YAAY/R,GACnC+J,EAAQinB,EAAQjnB,QAEhByY,EAAc,SAAS5e,GACvB,MAAO,YACH,OACI3B,OAAQ2B,EAAKmQ,IAAI,SAAStM,GAAK,OAAQA,EAAE0V,WACzCpT,MAAOinB,EAAQjnB,WAKvBwY,EAAc,SAAS3e,GACvB,MAAO,UAASyN,GACQlO,SAAhBkO,EAAMtH,QACNA,EAAQsH,EAAMtH,OACG5G,SAAjBkO,EAAMpP,QACN2B,EAAK2F,QAAQ,SAASmC,EAAOpJ,GACzBoJ,EAAOyR,UAAY9L,EAAMpP,OAAOK,OAK5C0kC,EAAmBlnC,GAAGmM,OAAM,IAwjBhC,OApGA+kB,GAAQhxB,SAASiB,GAAE,2BAA6B,SAASoc,GACrDA,EAAIzO,MAAQ,EAAKoiB,EAAQ5pB,IAAIiW,EAAIzO,OACjCyO,EAAIzO,MAAQ,EAAKoiB,EAAQniB,IAAIwO,EAAIzO,OACjCpP,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7BwrB,EAAQhxB,SAASiB,GAAE,0BAA4B,SAASoc,GACpD7d,EAAQgG,QAAO,KAOnBpD,EAAMpC,SAAWA,EACjBoC,EAAM4uB,QAAUA,EAChB5uB,EAAMghB,OAASA,EACfhhB,EAAMohB,SAAWA,EACjBphB,EAAMqa,MAAQA,EACdra,EAAM8rB,OAAS3H,EAAM9J,MACrBra,EAAMua,MAAQA,EACdva,EAAM+rB,OAAS5H,EAAM5J,MACrBva,EAAMwhB,iBAAmBA,EACzBxhB,EAAM5C,QAAUA,EAChB4C,EAAMmkB,MAAQA,EAEdnkB,EAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtEkb,YAAa5U,IAAK,WAAW,MAAO4U,IAAc3U,IAAK,SAASvG,GAAGkb,EAAWlb,IAC9E4lB,gBAAiBtf,IAAK,WAAW,MAAOsf,IAAkBrf,IAAK,SAASvG,GAAG4lB,EAAe5lB,IAC1FuU,WAAiBjO,IAAK,WAAW,MAAOiO,IAAahO,IAAK,SAASvG,GAAGuU,EAAUvU,IAChFyU,WAAenO,IAAK,WAAW,MAAOmO,IAAalO,IAAK,SAASvG,GAAGyU,EAAUzU,IAC9Ewa,cAAkBlU,IAAK,WAAW,MAAOkU,IAAgBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IACvFoO,QAAY9H,IAAK,WAAW,MAAO8H,IAAU7H,IAAK,SAASvG,GAAGoO,EAAOpO,IACrEqb,cAAkB/U,IAAK,WAAW,MAAO+U,IAAgB9U,IAAK,SAASvG,GAAGqb,EAAarb,IACvFiqB,eAAmB3jB,IAAK,WAAW,MAAO2jB,IAAiB1jB,IAAK,SAASvG,GAAGiqB,EAAcjqB,IAC1Fs/B,gBAAoBh5B,IAAK,WAAW,MAAOg5B,IAAkB/4B,IAAK,SAASvG,GAAGs/B,EAAet/B,IAC7F2/B,oBAA0Br5B,IAAK,WAAW,MAAOq5B,IAAsBp5B,IAAK,SAASvG,GAAG2/B,EAAmB3/B,IAC3G8/B,YAAkBx5B,IAAK,WAAW,MAAOw5B,IAAcv5B,IAAK,SAASvG,GAAG8/B,EAAW9/B,IACnF0lB,aAAiBpf,IAAK,WAAW,MAAOof,IAAenf,IAAK,SAASvG,GAAG0lB,EAAY1lB,IACpFmmB,aAAkB7f,IAAK,WAAW,MAAOgY,GAAM5f,UAAY6H,IAAK,SAASvG,GAAGse,EAAM5f,OAAOsB,KACzF2e,aAAcrY,IAAK,WAAW,MAAOgY,GAAMK,eAAiBpY,IAAK,SAASvG,GAAGse,EAAMK,YAAY3e,KAG/FpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACtC9E,SAAV8E,EAAElD,MACFF,EAAOE,IAAMkD,EAAElD,IACfse,EAAYpb,EAAElD,KAElBF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,OAE/DypB,aAAchgB,IAAK,WAAW,MAAOgY,GAAM1hB,QAAS2J,IAAK,SAASvG,GAC9Dse,EAAM1hB,OAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASwhB,EAAM1hB,OAAOE,IACvEwhB,EAAM1hB,OAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASmQ,EAAM1hB,OAAOuR,MACvEmQ,EAAM1hB,OAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAA
SoQ,EAAM1hB,OAAOsR,OACvEoQ,EAAM1hB,OAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASyhB,EAAM1hB,OAAOC,OAE3EiD,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBipB,EAAQjpB,SAASA,GACjB0U,EAAM1U,SAASA,GACf4U,EAAM5U,SAASA,KAEnB6D,OAAS2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GACnD2D,EAAQtM,EAAGG,MAAMsQ,SAAS9H,GAC1Bmb,EAAOxX,MAAMA,GACbolB,EAAQplB,MAAMA,GACd2a,EAAM3a,MAAMA,KAEhBxE,GAAImH,IAAK,WAAW,MAAOyiB,GAAQ5pB,KAAOoH,IAAK,SAASvG,GACpD+oB,EAAQ5pB,EAAEa,GACVse,EAAMnf,EAAEa,KAEZ4G,GAAIN,IAAK,WAAW,MAAOyiB,GAAQniB,KAAOL,IAAK,SAASvG,GACpD+oB,EAAQniB,EAAE5G,GACVse,EAAM1X,EAAE5G,KAEZ+U,iBAAkBzO,IAAK,WAAW,MAAOyO,IAAmBxO,IAAK,SAASvG,GACtE+U,EAAkB/U,EAClB0U,EAAMhF,OAAQqF,EAAkB,QAAU,UAE9C2G,yBAA0BpV,IAAK,WAAW,MAAOoV,IAA2BnV,IAAK,SAASvG,GACtF0b,IAA4B1b,EAC5B7F,EAAMof,aAAavZ,GACnB7F,EAAM8iB,YAAYjd,GAClB+oB,EAAQtE,QAAQlL,aAAavZ,OAIrC3I,EAAGG,MAAMkW,eAAevT,EAAO4uB,GAC/B1xB,EAAGG,MAAMqP,YAAY1M,GAEdA,GAGX9C,EAAGI,OAAOuoC,0BAA4B,WACpC,MAAO3oC,GAAGI,OAAOmnC,mBACdhiC,QAASsR,OAAQ,KACjBwX,aAAa,IC9pBlBruB,EAAGI,OAAOwoC,SAAW,WACjB,YAyCA,SAASC,GAA0B1gC,GAC/B,GAAI2gC,GAAcC,EAAmB5gC,EACrC,OAAG2gC,GAAc,GACN,IAGA,EAIf,QAASC,GAAmB5gC,GACxB,GAAI8xB,GAAavwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,KACnDkyB,EAAWtwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,EAAIK,EAAEwa,MACvDmmB,GAAiB7O,EAAaD,GAAY,GAAM,IAAMtwB,KAAK+P,IAAO,EACtE,OAAOqvB,GAGX,QAASE,GAAsB7gC,GAC3B,GAAI8xB,GAAavwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,KACnDkyB,EAAWtwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,EAAIK,EAAEwa,KAC3D,QAAQqX,EAAWC,IAAe,EAAIvwB,KAAK+P,IAG/C,QAASwvB,GAAsB9gC,GAC3B,GAAI8xB,GAAavwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,KACnDkyB,EAAWtwB,KAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,EAAIK,EAAEwa,MAEvDjT,EAAOsqB,EAAWC,CACtB,OAAOvqB,GAAO6sB,EAIlB,QAAS2M,GAAatnC,EAAEoB,GACpB,GAAImmC,GAAK3oC,GAAGynB,YAAYngB,EAAEnB,UAAWyD,EAAKtC,EAAGsC,EAAKtC,EAAIsC,EAAKuY,KAC3DymB,EAAK5oC,GAAGynB,YAAY1Y,EAAE5I,UAAWyD,EAAKmF,EAAG,IACzC85B,EAAK7oC,GAAGynB,YAAY1Y,EAAE9I,SAAU2D,EAAKmF,EAAI,GAAK,EAAG+Z,GAEjD,OAAU,KAANtmB,EACO,WAAY,MAAO+3B,GAAIn5B,IAGvB,SAAUoU,GAGb,MAFAlO,GAAEnB,OAAOwiC,EAAGnzB,IACZzG,EAAE5I,OAAOyiC,EAAGpzB,IAAIvP,MAAM4iC,EAAGrzB,IAClB+kB,EAAIn5B,IAKvB,QAAS0nC,GAAenhC,GACpB,GAAIohC,GAAM/oC,GAAGynB,aAAangB,EAAGK,EAAEsY,GAAIkC,GAAIxa,EAAEqhC,IAAKj6B,EAAGpH,EAAE4d,GAAItO,GAAItP,EAAEshC,KAAMthC,EAEnE,OAAO,UAAU6N,GACb,GAAIqI,GAAIkrB,EAAIvzB,EAOZ,OALA7N,GAAEsY,GAAKpC,EAAEvW,EACTK,EAAEqhC,IAAMnrB,EAAEsE,GACVxa,EAAE4d,GAAK1H,EAAE9O,EACTpH,EAAEshC,IAAMprB,EAAE5G,GAEHsjB,EAAI1c,IAInB,QAASqrB,GAAmBt/B,GACxB,GAAIwhB,GAAIpf,EAAIpC,EACPu/B,GAAc/d,KAAI+d,EAAc/d,MACrC,IAAIge,GAAKD,EAAc/d,EACvBge,GAAGjnB,GAAKvY,EAAKuY,GACbinB,EAAG9hC,EAAIsC,EAAKtC,EACZ8hC,EAAGnyB,GAAKrN,EAAKqN,GACbmyB,EAAGr6B,EAAInF,EAAKmF,EAGhB,QAASs6B,GAA2BxhB,GAChCA,EAAMpe,QAAQ,SAASsI,GACnB,GAAIqZ,GAAIpf,EAAI+F,GACRq3B,EAAKD,EAAc/d,EAEnBge,IACAr3B,EAAEi3B,IAAMI,EAAGjnB,GACXpQ,EAAEkO,GAAKmpB,EAAG9hC,EACVyK,EAAEk3B,IAAMG,EAAGnyB,GACXlF,EAAEwT,GAAK6jB,EAAGr6B,IAGVgD,EAAEi3B,IAAMj3B,EAAEoQ,GACVpQ,EAAEkO,GAAKlO,EAAEzK,EACTyK,EAAEk3B,IAAMl3B,EAAEkF,GACVlF,EAAEwT,GAAKxT,EAAEhD,GAEbm6B,EAAmBn3B,KAI3B,QAASu3B,GAAU3hC,GACf,GAAIgW,GAASjX,EAAUK,UAAS,QAC5BisB,EAAOtsB,EAAUK,UAAS,OAG9B4W,GAAOhQ,aAAaxG,KAAI,UAAW,GAGnCyC,EAAOjC,EAEPqrB,EAAKrlB,aACA1F,SAASA,GACT+yB,UAAS,IAAM0N,GACf7kC,KAAI,MAAQ,SAASzC,GAGlB,GAAGA,EAAEkG,GAAKK,EAAEL,GAAKlG,EAAEkG,EAAKK,EAAEL,EAAIK,EAAEwa,IACzB/gB,EAAEmoC,OAAS5hC,EAAE4hC,MAAK,CAEjB,GAAIt4B,GAAajR,GAAG2G,OAAOpG,KAAK0Q,YAC5Bu4B,EAAUv4B,EAAWtK,OAAM,OAG/
B6iC,GAAQ77B,aAAa1F,SAASA,GAC7ByJ,KAAM,SAAStQ,GAAG,MAAOg7B,GAAYh7B,KACrC+F,KAAI,UAAY,SAASQ,GACtB,MAAG8gC,GAAsB9gC,GACd,EAGA,IAGdR,KAAI,YAAc,WACf,GAAIP,GAAQrG,KAAKkpC,UAAU7iC,KAC3B,IAAe,IAAZxF,EAAEmoC,MACL,MAAO,aAAgB3iC,EAAQ,EAAI,GAAO,KACrC,IAAGxF,EAAEmoC,QAAU5hC,EAAE4hC,MAClB,MAAO,cAAgBx6B,EAAE3N,EAAE2N,GAAK,GAAK,KAGrC,IAAIu5B,GAAcC,EAAmBnnC,GACjCsoC,EAAWrB,EAA0BjnC,EACzC,OAAiB,KAAbsoC,EACO,UAAWpB,EAAY,eAAkBv5B,EAAE3N,EAAE2N,GAAK,GAAK,MAGvD,UAAWu5B,EAAY,eAAkBv5B,EAAE3N,EAAE2N,GAAKnI,EAAQ,GAAK,aAAe8iC,EAAW,SAchI,QAASpnC,GAAMsB,GAoJX,MAnJAqO,GAAYW,QAEZhP,EAAUC,KAAK,SAASC,GACpB4C,EAAY1G,GAAG2G,OAAOpG,MACtB4E,EAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,GAC9D+jB,EAAS5f,KAAKF,IAAI7D,EAAgBC,GAAmB,EAErD2J,EAAE9I,OAAO,EAAG6iB,GAGZ,IAAIhiB,GAAOJ,EAAUC,OAAM,6BACtBG,GAAK,GAAG,GAKTA,EAAKK,KAAI,YAAc,cAAiBhC,EAAiB,EAAKJ,EAAOC,KAAOD,EAAOuR,OAAS,KAAQlR,EAAkB,EAAKL,EAAOE,IAAMF,EAAOsR,QAAU,KAJzJvP,EAAOJ,EAAUQ,OAAM,KAClBC,KAAI,QAAU,qCAAuC4C,GACrD5C,KAAI,YAAc,cAAiBhC,EAAiB,EAAKJ,EAAOC,KAAOD,EAAOuR,OAAS,KAAQlR,EAAkB,EAAKL,EAAOE,IAAMF,EAAOsR,QAAU,KAK7J3P,EAAUvF,GAAE,QAAU,SAAUwG,EAAGnF,GAC/BtC,EAAS8gB,YACLld,KAAM6D,EACNiB,MAAOpG,EACPiK,IAAKzM,GAAGuE,MACRwF,GAAIA,MAIZ4/B,EAAUn+B,MAAMo+B,EAAMC,IAASD,EAAY,MAI3C,IAAI/hB,GAAQ8hB,EAAU9hB,MAAM/jB,EAAK,IAAIgT,SAErCuyB,GAA2BxhB,EAC3B,IAAIiiB,GAAKhjC,EAAKC,UAAS,kBAAmBjD,KAAK+jB,EAAO7b,GAGlD+9B,EAAMD,EAAG7iC,QACRC,OAAM,KACNC,KAAI,QAAO,gBAEhB4iC,GAAI7iC,OAAM,QACLC,KAAI,IAAMozB,GACVtwB,MAAK,OAAS,SAAUtC,GACrB,MAAIA,GAAEmE,MACKnE,EAAEmE,MAGFA,EADFk+B,GACSriC,EAAEsiC,SAAWtiC,EAAIA,EAAEuiC,QAAQpoC,KAG5B6F,EAAE7F,QAGtBmI,MAAK,SAAW,QAChB9I,GAAE,QAAU,SAASwG,EAAEnF,GACpB8mC,EAAU3hC,GACVzH,EAASqG,cACLzC,KAAM6D,EACNiB,MAAOpG,MAGdrB,GAAE,YAAc,SAASwG,EAAEnF,GACxBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAAMD,MAAK,UAAY,IACxD/J,EAASsb,kBACL1X,KAAM6D,EACNmE,MAAO9L,GAAG2G,OAAOpG,MAAM0J,MAAK,QAC5BiC,QAASs8B,EAAsB7gC,OAGtCxG,GAAE,WAAa,SAASwG,EAAEnF,GACvBxC,GAAG2G,OAAOpG,MAAM2J,QAAO,SAAU,GAAOD,MAAK,UAAY,GACzD/J,EAASsF,iBACL1B,KAAM6D,MAGbxG,GAAE,YAAc,SAASwG,EAAEnF,GACxBtC,EAASmG,kBACLvC,KAAM6D,MAOlBmiC,EAAGjmC,KAAK,SAAS8D,GACb3H,GAAG2G,OAAOpG,MAAMoG,OAAM,QACjBgH,aACA1F,SAASA,GACT+yB,UAAS,IAAM8N,KAGrB7N,IAEC6O,EAAG/iC,UAAS,QAASc,SAGrBiiC,EAAG5iC,OAAM,QACJwK,KAAM,SAAStQ,GAAI,MAAOg7B,GAAYh7B,KACtCuM,aACA1F,SAASA,GACTd,KAAI,UAAY,SAASQ,GACtB,MAAG8gC,GAAsB9gC,GACd,EAGA,IAGdR,KAAI,YAAc,SAASQ,GACxB,GAAIf,GAAQrG,KAAKkpC,UAAU7iC,KAC3B,IAAe,IAAZe,EAAE4hC,MACD,MAAO,sBAAyB3iC,EAAQ,EAAI,GAAM,KAGlD,IAAI0hC,GAAcC,EAAmB5gC,GACjC+hC,EAAWrB,EAA0B1gC,EACzC,OAAiB,KAAb+hC,EACO,UAAWpB,EAAY,eAAkBv5B,EAAEpH,EAAEoH,GAAK,GAAK,MAGvD,UAAWu5B,EAAY,eAAkBv5B,EAAEpH,EAAEoH,GAAKnI,EAAQ,GAAK,aAAe8iC,EAAW,OAOpHJ,EAAUzhB,EAAMA,EAAMjmB,OAAS,IAI/BkoC,EAAGliC,OACE+F,aACA1F,SAASA,GACTd,KAAI,UAAW,GACftD,KAAI,MAAO,SAAS8D,GACjB,GAAIyjB,GAAIpf,EAAIrE,EACZwhC,GAAc/d,GAAK/nB,SAEtBwE,WAIToK,EAAYS,UAAS,sBACdpQ,EAnVX,GA0BIsH,GAAMzE,EAAgBC,EAAiB0jB,EA1BvC/jB,GAAUE,IAAK,EAAGqR,MAAO,EAAGD,OAAQ,EAAGrR,KAAM,GAC3C4B,EAAQ,IACRC,EAAS,IACTgjC,EAAO,QACPD,GAASO,MAAO,SAASxiC,GAAK,MAAO,IAAM6D,MAAO,SAAS7D,GAAK,MAAOA,GAAE6D,OAAS7D,EAAEuH,MAAQA,KAAM,SAASvH,GAAK,MAAOA,GAAE6D,OAAS7D,EAAEuH,OACpInF,EAAKb,KAAKwB,MAAsB,IAAhBxB,KAAKyB,UACrBjE,EAAY,KACZoF,EAAQtM,EAAGG,MAAMuQ,eACjB+qB,GAAa,EACbmB,EAAc,SAASz0B,GAAG,MAAY,UAATkiC,EAAyBliC,EAAE7F,KAAO,KAAO6F,EAAE6D,MAAkB7D,EAAE7F,KAAO,KAAO6F,EAAE6D,OAAS7D,EAAEuH,OACvH6sB,EAAiB,IACjB3mB,EAAO,SAASg1B,EAAIC,GAAI,MAAOD,GAAGtoC,KAAOuoC,EAAGvoC,MAC5CkK,EAAM,SAASrE,EAAEnF,GAAG,MAAOmF,GAAE7F,MAC7BkoC,GAAqB,EACrB/hC,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,aAAe,eAAgB,kBAAmB,mBAAoB,mBAAoB,kBAAmB,aAMrIoH,E
AAItH,GAAG8H,MAAMC,SAAS9B,OAAO,EAAG,EAAIiD,KAAK+P,KACzClK,EAAI/O,GAAG8H,MAAM+gB,OAEb8gB,EAAY3pC,GAAGmoB,OAAOwhB,YAAYv0B,KAAKA,GAGvC+zB,KAEA5O,EAAMv6B,GAAG0V,IAAI6kB,MACZd,WAAW,SAAS9xB,GAAI,MAAOuB,MAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,OACrEkyB,SAAS,SAAS7xB,GAAI,MAAOuB,MAAKL,IAAI,EAAGK,KAAKF,IAAI,EAAIE,KAAK+P,GAAI3R,EAAEK,EAAEL,EAAIK,EAAEwa,QACzEwX,YAAY,SAAShyB,GAAI,MAAOuB,MAAKL,IAAI,EAAGkG,EAAEpH,EAAEoH,MAChDyrB,YAAY,SAAS7yB,GAAI,MAAOuB,MAAKL,IAAI,EAAGkG,EAAEpH,EAAEoH,EAAIpH,EAAEsP,OA4JvDhF,EAAczS,EAAGG,MAAMsS,YAAY/R,EA0LvC,OA7BAoC,GAAMpC,SAAWA,EACjBoC,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAE1CA,EAAMgM,SAAWC,OAAOC,WAEpB5H,OAAa6H,IAAK,WAAW,MAAO7H,IAAS8H,IAAK,SAASvG,GAAGvB,EAAMuB,IACpEtB,QAAa4H,IAAK,WAAW,MAAO5H,IAAU6H,IAAK,SAASvG,GAAGtB,EAAOsB,IACtE0hC,MAAap7B,IAAK,WAAW,MAAOo7B,IAAQn7B,IAAK,SAASvG,GAAG0hC,EAAK1hC,IAClE4B,IAAa0E,IAAK,WAAW,MAAO1E,IAAM2E,IAAK,SAASvG,GAAG4B,EAAG5B,IAC9DF,UAAawG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GAAGF,EAASE,IAC1E6hC,oBAAqBv7B,IAAK,WAAW,MAAOu7B,IAAsBt7B,IAAK,SAASvG,GAAG6hC,IAAqB7hC,IACxG8yB,YAAaxsB,IAAK,WAAW,MAAOwsB,IAAcvsB,IAAK,SAASvG,GAAG8yB,IAAa9yB,IAChFi0B,aAAc3tB,IAAK,WAAW,MAAO2tB,IAAe1tB,IAAK,SAASvG,GAAGi0B,EAAYj0B,IACjF4zB,gBAAiBttB,IAAK,WAAW,MAAOstB,IAAkBrtB,IAAK,SAASvG,GAAG4zB,EAAe5zB,IAC1FiN,MAAO3G,IAAK,WAAW,MAAO2G,IAAQ1G,IAAK,SAASvG,GAAGiN,EAAKjN,IAC5D6D,KAAMyC,IAAK,WAAW,MAAOzC,IAAO0C,IAAK,SAASvG,GAAG6D,EAAI7D,IAEzDpD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAqB5B,QAAZ8E,EAAElD,IAAsBkD,EAAElD,IAASF,EAAOE,IAC1DF,EAAOuR,MAAqBjT,QAAZ8E,EAAEmO,MAAsBnO,EAAEmO,MAASvR,EAAOuR,MAC1DvR,EAAOsR,OAAqBhT,QAAZ8E,EAAEkO,OAAsBlO,EAAEkO,OAAStR,EAAOsR,OAC1DtR,EAAOC,KAAqB3B,QAAZ8E,EAAEnD,KAAsBmD,EAAEnD,KAASD,EAAOC,OAE9D8G,OAAQ2C,IAAK,WAAW,MAAO3C,IAAS4C,IAAK,SAASvG,GAClD2D,EAAMtM,EAAGG,MAAMsQ,SAAS9H,OAIhC3I,EAAGG,MAAMqP,YAAY1M,GACdA,GC/XX9C,EAAGI,OAAO0qC,cAAgB,WACtB,YAoCA,SAAShoC,GAAMsB,GAkCX,MAjCAqO,GAAYW,QACZX,EAAYrS,OAAOwoC,GAEnBxkC,EAAUC,KAAK,SAASC,GACpB,GAAI4C,GAAY1G,GAAG2G,OAAOpG,KAE1Bf,GAAGG,MAAMsW,QAAQvP,EAEjB,IAAIvB,GAAiB3F,EAAGG,MAAMwF,eAAeyB,EAAOF,EAAW3B,GAC3DK,EAAkB5F,EAAGG,MAAMyF,gBAAgByB,EAAQH,EAAW3B,EAYlE,OAVAzC,GAAMqR,OAAS,WACM,IAAb1L,EACAvB,EAAU9F,KAAK0B,GAEfoE,EAAUiH,aAAa1F,SAASA,GAAUrH,KAAK0B,IAGvDA,EAAMoE,UAAYA,EAGb5C,GAASA,EAAKlC,QAIf8E,EAAUK,UAAS,cAAec,SAGtCugC,EAASxhC,MAAMzB,GAAgB0B,OAAOzB,GAAiBL,OAAOA,OAC9D2B,GAAU9F,KAAKwnC,KAPX5oC,EAAGG,MAAM4W,OAAOjU,EAAOoE,GAChBpE,KASf2P,EAAYS,UAAS,2BACdpQ,EAhEX,GAAI8lC,GAAW5oC,EAAGI,OAAOwoC,WACrB1oC,EAAUF,EAAGI,OAAOF,UAEpBqF,GAAUE,IAAK,GAAIqR,MAAO,GAAID,OAAQ,GAAIrR,KAAM,IAC9C4B,EAAQ,KACRC,EAAS,KACTiF,EAAQtM,EAAGG,MAAMuQ,eACjBqsB,GAAqB,EAErB5Z,GADKzZ,KAAK6E,MAAsB,IAAhB7E,KAAKyB,UACN,MACf4L,EAAS,KACTtO,EAAW,IACX/H,EAAWF,GAAGE,SAAQ,cAAgB,cAAY,aAOpD+R,EAAczS,EAAGG,MAAMsS,YAAY/R,EA+GvC,OA7GAR,GACKuI,SAAS,GACT+C,eAAc,GACdC,eAAe,SAAStD,GAAG,MAAOA,KA+CvCygC,EAASloC,SAASiB,GAAE,2BAA6B,SAASoc,GACtDA,EAAI3R,QACAI,IAAKuR,EAAIzZ,KAAKhC,KACd0J,MAAQ+R,EAAIzZ,KAAK0H,OAAS+R,EAAIzZ,KAAKoL,KACnCpD,MAAOyR,EAAIzR,MACXI,QAASqR,EAAIrR,SAEZqwB,UACMhf,GAAIrR,cACJqR,GAAI3R,OAAOM,SAEtBxM,EAAQoE,KAAKyZ,GAAK7X,QAAO,KAG7B0iC,EAASloC,SAASiB,GAAE,0BAA4B,SAASoc,GACrD7d,EAAQgG,QAAO,KAGnB0iC,EAASloC,SAASiB,GAAE,2BAA6B,SAASoc,GACtD7d,MAQJ4C,EAAMpC,SAAWA,EACjBoC,EAAM8lC,SAAWA,EACjB9lC,EAAM5C,QAAUA,EAChB4C,EAAM8L,QAAU5O,EAAGG,MAAM0O,YAAYhO,KAAKiC,GAG1CA,EAAMgM,SAAWC,OAAOC,WAEpB+H,QAAqB9H,IAAK,WAAW,MAAO8H,IAAwB7H,IAAK,SAASvG,GAAGoO,EAAOpO,IAC5Fwa,cAAqBlU,IAAK,WAAW,MAAOkU,IAAwBjU,IAAK,SAASvG,GAAGwa,EAAaxa,IAClGo0B,oBAAqB9tB,IAAK,WAAW,MAAO8tB,IAAwB7tB,IAAK,SAASvG,GAAGo0B,EAAmBp0B,IAGxG2D,OAAQ2C,IAAK,WAAW,MAAO3C
,IAAS4C,IAAK,SAASvG,GAClD2D,EAAQ3D,EACRigC,EAASt8B,MAAMA,KAEnB7D,UAAWwG,IAAK,WAAW,MAAOxG,IAAYyG,IAAK,SAASvG,GACxDF,EAAWE,EACX8J,EAAYW,MAAM3K,GAClBmgC,EAASngC,SAASA,KAEtBlD,QAAS0J,IAAK,WAAW,MAAO1J,IAAU2J,IAAK,SAASvG,GACpDpD,EAAOE,IAAsB5B,SAAb8E,EAAElD,IAAuBkD,EAAElD,IAASF,EAAOE,IAC3DF,EAAOuR,MAAsBjT,SAAb8E,EAAEmO,MAAuBnO,EAAEmO,MAASvR,EAAOuR,MAC3DvR,EAAOsR,OAAsBhT,SAAb8E,EAAEkO,OAAuBlO,EAAEkO,OAAStR,EAAOsR,OAC3DtR,EAAOC,KAAsB3B,SAAb8E,EAAEnD,KAAuBmD,EAAEnD,KAASD,EAAOC,KAC3DojC,EAASrjC,OAAOA,OAGxBvF,EAAGG,MAAMkW,eAAevT,EAAO8lC,GAC/B5oC,EAAGG,MAAMqP,YAAY1M,GACdA;A3CjIX,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU;AACxE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACpD,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ;AACnE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,YAAY,CAAC,SAAS;AACzC;AACA,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE;AAC7B,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACrG,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,EAAE,EAAE,GAAG;AACvB,CAAC;AACD;AACA,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,CAAC,CAAC,UAAU,GAAG;AACxD;AACA,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ;AACzB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO;AACrG,EAAE,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,KAAK;AAClD,EAAE,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,SAAS,CAAC,IAAI;AACpE,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC/C,EAAE,CAAC,EAAE,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AAC/B,IAAI,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAChD,QAAQ,EAAE,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,YAAY,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,QAAQ;AACtF,YAAY,KAAK,CAAC,GAAG,CAAC,SAAS,EAAE,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,GAAG;AACxG,QAAQ,CAAC;AACT;AACA,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;AAC7D,YAAY,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AAC3B,YAAY,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG;AAClC,YAAY,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AAClC,gBAAgB,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK;AAClE,wBAAwB,CAAC,CAAC,IAAI;AAC9B,wBAAwB,CAAC,CAAC,KAAK,CAAC;AAChC,oBAAoB,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,IAAI;AACzE,YAAY,EAAE;AACd;AACA,QAAQ,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC;AACxC,QAAQ,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,GAAG;AACtC,QAAQ,MAAM,CAAC,MAAM,CAAC;AACtB,IAAI,EAAE;AACN,CAAC;AACD;AACA,EAAE,EAAE,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACvD,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AACb,IAAI,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,QAAQ,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,GAAG;AACxC,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,QA
AQ,CAAC,EAAE,EAAE,UAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,QAAQ,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,GAAG;AACtC,QAAQ,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC;AAChE,QAAQ,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK;AAC5G,IAAI,GAAG;AACP,CAAC;AACD;AACA,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK;AAC3E,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI;AACjF,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ;AAChF,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC;AACnC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrB,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC;AACrE,QAAQ,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,SAAS,EAAE;AAC9C,IAAI,IAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;AACvG,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE;AACrE,QAAQ,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,SAAS,EAAE;AACtC,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3C,EAAE;AACF;AACA,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,SAAS;AAChE,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AAClC,QAAQ,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI;AACvF,IAAI,CAAC;AACL,EAAE;AACF;AACA,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS;AAC7D,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC;AACnC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC,OAAO,CAAC;AAChF,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;AACnC,IAAI,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI;AACxD,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,IAAI,EAAE,CAAC,QAAQ,CAAC,YAAY,GAAG;AAC/B;AACA,IAAI,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC;AACzB;AACA,QAAQ,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;AACxE,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,GAAG;AACrC,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,EAAE;AACjF,QAAQ,CAAC;AACT;AACA,QAAQ,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC;AACA,QAAQ,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC
,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;AACrC,YAAY,UAAU,CAAC,UAAU,EAAE;AACnC,QAAQ,CAAC;AACT,QAAQ,IAAI,CAAC,CAAC;AACd,YAAY,EAAE,CAAC,QAAQ,CAAC,UAAU,GAAG;AACrC,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACrC,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,UAAU,CAAC,UAAU,EAAE;AAC3B,EAAE;AACF;AACA,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACzB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB;AACA,EAAE;AACF,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;AACvF,EAAE,CAAC,QAAQ,EAAE;AACb,IAAI,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AACxB,IAAI,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AACxB,EAAE;AACF;AACA,EAAE;AACF;AACA,EAAE,CAAC,QAAQ,EAAE,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrD;AACA,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,OAAO;AACnF,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ;AACtF,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;AAChF;AACA,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;AACzF,EAAE;AACF,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7B,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC;AACnD,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,GAAG;AAC/D,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE;AAC9B;AACA,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;AAC5B,QAAQ,EAAE,CAAC,MAAM,GAAG;AACpB,IAAI,CAAC;AACL,EAAE;AACF;AACA,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO;AACxB,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;AACtB,CAAC;AACD;AACA,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACrC,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACjB,CAAC;ACzJD,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC;AAC5C,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;AACxD,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC;AACtD,CAAC,CAAC,CAAC,eAAe,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;AAClC,CAAC,EAAE,CAAC;AACJ,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AACpC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AACrC,EAAE,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACnC,CAAC,CAAC,CAAC;AACH,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC;AACpB,EAAE,CAAC;AACH,CAAC;AACD,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC;AAC3C,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;AACxD,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC;AACtD,CAAC,CAAC,CAAC,eAAe,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;AAClC,CAAC,EAAE,CAAC;AACJ,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AACnC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CA
AC,SAAS,CAAC,CAAC,CAAC,CAAC;AACrC,EAAE,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC;AACpC,CAAC,CAAC,CAAC;AACH,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC;AACpB,EAAE,CAAC;ACxBH,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC;AAC5D,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ;AAC9F,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC;AAC5F;AACA,CAAC,QAAQ,CAAC,gBAAgB,CAAC,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI;AAC3G,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC;AACtF,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;AAClF,CAAC,EAAE;AACH,EAAE,CAAC,oBAAoB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC;AAC5G,QAAQ,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI;AACxB,QAAQ,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI;AACzB,QAAQ,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AACtC,QAAQ,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,cAAc,EAAE;AAClJ,QAAQ,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI;AAChC,QAAQ,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC;AAC5F,QAAQ,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACzC,QAAQ,CAAC,GAAG,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,aAAa,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC;AACjG,IAAI,CAAC;AACL;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE;AACvB;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,EAAE;AACnF,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,oBAAoB,EAAE;AAC/E,gBAAgB,CAAC,IAAI,EAAE,IAAI,GAAG;AAC9B,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE;AACxC,gBAAgB,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,oBAAoB,GAAG;AAC/E,YAAY,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,oBAAoB,GAAG;AAC1E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,MAAM,CAAC;AACvB,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,YAAY,EAAE,CAAC,CAAC;AACrC,gBAAgB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE;AAC7C,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE;AACxC,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAA
E;AACxC,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1C,gBAAgB,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC,KAAK,CAAC;AAC9C,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7B,oBAAoB,EAAE;AACtB,qBAAqB,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;AAC/F,qBAAqB,EAAE,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM;AACzF,qBAAqB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;AAC1C,qBAAqB,OAAO,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,WAAW;AACnF,qBAAqB,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AAClF,qBAAqB,EAAE;AACvB,oBAAoB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC;AAC9C,oBAAoB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC;AAC9C;AACA,oBAAoB,EAAE;AACtB,qBAAqB,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC;AACxF,qBAAqB,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE;AACzG,qBAAqB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO;AAClG,qBAAqB,EAAE,CAAC,OAAO,CAAC;AAChC,qBAAqB,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAChG,qBAAqB,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK;AACvF,qBAAqB,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC;AACnE,qBAAqB,EAAE;AACvB,oBAAoB,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AAC3D,wBAAwB,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC;AAC/C,oBAAoB,CAAC;AACrB;AACA,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;AAC/E,wBAAwB,iBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC;AACjD,oBAAoB,CAAC;AACrB;AACA,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;AACpC,oBAAoB,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AAC1C,oBAAoB,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC;AACzC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC;AAChE,iBAAiB,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC;AAC1C,iBAAiB,EAAE;AACnB,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC;AAChD,oBAAoB,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,eAAe;AAC1E,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,eAAe,CAAC,GAAG,CAAC,SAAS,CAAC;AACvG,oBAAoB,EAAE,CAAC,iBAAiB;AACxC,oBAAoB,CAAC,CAAC,CAAC;AACvB;AACA,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACjC,wBAAwB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa;AAClD,4BAA4B,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,eAAe,CAAC,GAAG,CAAC,SAAS;AACnF,4BAA4B,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,SAAS;AAC9E,gCAAgC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,oBAAoB,GAAG,CAAC,CAAC;AAC3G;AACA,4BAA4B,MAAM,CAAC;AACnC,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oB
AAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC;AACvC,wBAAwB,MAAM,CAAC,CAAC,MAAM;AACtC,oBAAoB,GAAG;AACvB,oBAAoB,KAAK,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,SAAS;AACrE,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,oBAAoB,MAAM,CAAC;AAC3B,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE;AAC1C,gBAAgB,CAAC;AACjB;AACA;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE;AAC7E,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC;AAC5C;AACA,gBAAgB,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM;AACrD,gBAAgB,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACrC,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7E,oBAAoB,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI;AAC3D,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AACtF,wBAAwB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,oBAAoB,CAAC;AACrB,oBAAoB,IAAI,CAAC,CAAC;AAC1B,wBAAwB,QAAQ,CAAC,eAAe,EAAE;AAClD,4BAA4B,MAAM,CAAC,CAAC,MAAM,CAAC;AAC3C,4BAA4B,MAAM,CAAC,CAAC,MAAM;AAC1C,wBAAwB,GAAG;AAC3B,wBAAwB,KAAK,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,SAAS;AACzE,wBAAwB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7C,wBAAwB,MAAM,CAAC;AAC/B,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,CAAC;AACtB,oBAAoB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE;AACxD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,QAAQ,CAAC,gBAAgB,EAAE;AAC3C,oBAAoB,MAAM,CAAC,CAAC,MAAM,CAAC;AACnC,oBAAoB,MAAM,CAAC,CAAC,MAAM,CAAC;AACnC,oBAAoB,WAAW,CAAC,CAAC,WAAW;AAC5C,gBAAgB,GAAG;AACnB;AACA,gBAAgB,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,eAAe;AACzE,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACnD,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC;AACvC,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC;AACvC,wBAAwB,WAAW,CAAC,CAAC,WAAW;AAChD,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,YAAY;AACrE,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC;AACvC,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC;AACvC,wBAAwB,WAAW,CAAC,CAAC,WAAW;AAChD,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,gBAAgB;AAC9E,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACpD,iBAAiB,QAAQ,CAAC,gBAAgB,EAAE;AAC5C,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAAC;AACjC,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAAC;AACjC,kBAAkB,WAAW,CAAC,CAAC,WAAW;AAC1C,iBAAiB,GAAG;AACpB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,cAAc;AAC5E,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAClD,iBAAiB,QAAQ,CAAC,cAAc,EAAE;AAC1C,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAAC;AACjC,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAAC;AACjC,kBAAkB,WAAW,CAAC,CAAC,WAAW;AAC1C,iBAAiB,GAAG;AACpB,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,YAAY;AACxB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,YAAY,CAAC;AAC7C,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC,IAAI,CAAC;AACnD,gBAAgB,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC;AAClD,gBAAgB,CAAC,EAAE,EAAE,SAAS,CAAC,CA
AC,CAAC,YAAY,CAAC,IAAI,CAAC;AACnD,gBAAgB,CAAC,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC;AACjD,gBAAgB,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC;AAC7C,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,YAAY,CAAC;AAC1C,YAAY,CAAC;AACb;AACA,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC,YAAY,EAAE,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC;AAChE,YAAY,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,EAAE,CAAC,EAAE,aAAa,CAAC,CAAC,MAAM,CAAC;AAC3C,gBAAgB,EAAE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AAChF,gBAAgB,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,oBAAoB,EAAE;AACtE,wBAAwB,CAAC,SAAS,EAAE,IAAI,EAAE;AAC1C,wBAAwB,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,EAAE;AAClF,oBAAoB,IAAI,CAAC,KAAK,EAAE;AAChC,wBAAwB,CAAC,MAAM,EAAE,IAAI,EAAE;AACvC,wBAAwB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AACtD,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAC7D,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAC7D,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,eAAe,CAAC;AACpD,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;AACtC,oBAAoB,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACzC,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7C,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC/E,QAAQ,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAChF,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5C,QAAQ,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAClB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7C,QAAQ,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACnB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7C,QAAQ,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACnB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC;AACpD,QAAQ,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC;AACnD,QAAQ,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AACF;AACA,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM
,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AAC9G,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5G;AACA,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC;AAC7E,CAAC,MAAM,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACrH,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;AACpC;AACA,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,qBAAqB,CAAC,IAAI;AACvD;AACA,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC;AAChC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC;AACjH,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC;AACzD,CAAC,EAAE;AACH,EAAE,CAAC,iBAAiB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAChE,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB,IAAI,CAAC;AACL,IAAI,GAAG,CAAC,UAAU,CAAC;AACnB,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC1C,QAAQ,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACvB,QAAQ,CAAC;AACT,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;AACZ,QAAQ,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;AAC/B,IAAI,CAAC;AACL,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI;AAC7E,QAAQ,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC;AAC1D,QAAQ,EAAE;AACV,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG;AACtE,QAAQ,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ;AAC5E,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS;AAC7E,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;AACtE,QAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO;AAC5E,QAAQ,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC;AAChD,QAAQ,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;AACxC,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC1D,IAAI,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,GAAG;AACjD;AACA,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AAC9C,QAAQ,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7B,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACrC,QAAQ,MAAM
,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,KAAK;AACzC,IAAI,CAAC;AACL;AACA,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AACzD,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,GAAG;AAClD;AACA,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AAC3C,QAAQ,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;AAC9B,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AAChF,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;AACZ,QAAQ,MAAM,CAAC,SAAS;AACxB,IAAI,CAAC;AACL,EAAE;AACF;AACA,EAAE;AACF,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,SAAS,CAAC;AACtE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE;AAC/D,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC;AACzB,CAAC,EAAE;AACH,EAAE,CAAC,iBAAiB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAChE,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;AACrD,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AACnE,YAAY,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7B,YAAY,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,CAAC;AACT,IAAI,GAAG;AACP,IAAI,MAAM,CAAC,gBAAgB,CAAC;AAC5B,EAAE;AChVF;AACA,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC;AAC/D,CAAC,OAAO,CAAC,KAAK,CAAC;AACf,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG,OAAO,EAAE,CAAC,GAAG,QAAQ,CAAC,EAAE,CAAC;AACxD,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;AACrB;AACA,CAAC,GAAG,GAAG,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC;AACjE,CAAC,EAAE;AACH,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,EAAE;AACN,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC,SAAS,CAAC;AAC3F,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC;AAC3B,IAAI,CAAC;AACL,QAAQ,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE;AACpB,QAAQ,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,QAAQ,MAAM,CAAC,CAAC,CAAC;AACjB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,GAAG;AAC/D,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,EAAE;AAC9D,QAAQ,CAAC;AACT,IAAI,CAAC;AACL,IAAI,EAAE;AACN,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC;AAC/H,QAAQ,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC;AAC5F,QAAQ,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC
,KAAK,CAAC,QAAQ,CAAC;AAChF,QAAQ,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AACjI,QAAQ,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;AAClG,QAAQ,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AACtF,QAAQ,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,GAAG;AAC5F,QAAQ,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC;AAC3D,QAAQ,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;AACzF,QAAQ,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAC7F,QAAQ,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AAC/D,QAAQ,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC;AACrE,QAAQ,CAAC,GAAG,oBAAoB,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC;AACjI,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC;AACrD,IAAI,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,QAAQ,MAAM,CAAC,CAAC,CAAC;AACjB,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC;AACpD,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,MAAM,CAAC,CAAC,CAAC;AACjB,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,MAAM,CAAC,CAAC,CAAC;AACjB,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC;AAC5E,IAAI,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC;AACrE,IAAI,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACzB,YAAY,MAAM,CAAC,GAAG;AACtB,QAAQ,CAAC;AACT;AACA,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,EAAE,KAAK,IAAI;AAC/D,QAAQ,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AAC5B,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAE;AACrD,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE;AAC1B,gBAAgB,CAAC,KAAK,GAAG,MAAM,EAAE,KAAK,GAAG;AACzC;AACA,YAAY,UAAU,CAAC,MAAM,EAAE,EAAE,EAAE;AACnC,gBAAgB,CAAC,MAAM,EAAE,EAAE,EAAE;AAC7B,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;AACnC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE;AACjC,gBAAgB,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC;AACzC,gBAAgB,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,GAAG;AAChD,QAAQ,CAAC;AACT;AACA,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAE;AACjD,YAAY,CAAC,IAAI,EAAE,CAAC,EAAE;AACtB,YAAY,CAAC,KAAK,GAAG,MAAM,EAAE,KAAK,GAAG;AACrC;AACA,QAAQ,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,SAAS,EAA
E,EAAE,EAAE;AAClD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE;AACrD,gBAAgB,CAAC,KAAK,EAAE;AACxB,gBAAgB,CAAC,MAAM,EAAE,EAAE,EAAE;AAC7B,gBAAgB,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,GAAG;AACzE;AACA,QAAQ,SAAS,CAAC,MAAM,EAAE,EAAE,EAAE;AAC9B,YAAY,CAAC,OAAO,EAAE,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC;AAC/C,YAAY,CAAC,MAAM,EAAE,GAAG,EAAE;AAC1B,YAAY,CAAC,KAAK,EAAE,UAAU,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG;AACtE;AACA,QAAQ,SAAS,CAAC,MAAM,EAAE,EAAE,EAAE;AAC9B,YAAY,CAAC,OAAO,EAAE,GAAG,EAAE,IAAI,CAAC;AAChC,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE;AAC7D,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACnE;AACA,QAAQ,SAAS,CAAC,MAAM,EAAE,EAAE,EAAE;AAC9B,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC;AAClC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACxE;AACA,QAAQ,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,GAAG,MAAM,EAAE,EAAE,EAAE;AACxF,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC;AACrC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AACnF;AACA,QAAQ,SAAS,CAAC,SAAS,EAAE,EAAE,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC9B,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,KAAK,IAAI,GAAG,EAAE,CAAC,CAAC,KAAK,GAAG;AAC3F,gBAAgB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AAC/B,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE;AACxE,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE;AACrE,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,SAAS,CAAC;AAC1C,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AACnC,YAAY,IAAI,CAAC,EAAE,CAAC,EAAE,GAAG,CAAC,KAAK,EAAE,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG;AACjE,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB;AACA,IAAI,EAAE;AACN;AACA,IAAI,EAAE;AACN,KAAK,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC;AAChF,KAAK,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC;AACrC,KAAK,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;AACrB,QAAQ,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE;AACxB,QAAQ,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;AACrB,KAAK,CAAC;AACN,KAAK,EAAE;AACP,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC/B,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnB,YAAY,IAAI,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3D,YAAY,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AACzD,QAAQ,EAAE;AACV;AACA,QAAQ,EAAE,CAAC,gBAAgB,CAAC,QAAQ,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;AACjE,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ;AAC3E,YAAY,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC;AAChC,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,qBAAqB,GAAG;AAC/D,YAAY,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,
MAAM,CAAC,IAAI,CAAC;AACpC,YAAY,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC;AAClC,QAAQ,CAAC;AACT;AACA,QAAQ,MAAM,CAAC,GAAG,CAAC;AACnB,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC5B,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AAC7C,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YAAY,CAAC;AACb,YAAY,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACvE,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AAC9C,gBAAgB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AACtC,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO;AACrF,IAAI,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC;AAChC,IAAI,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3C,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,GAAG,YAAY,CAAC;AACjD,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC;AAC/C,YAAY,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC;AACzF,YAAY,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC;AAC3F,YAAY,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;AAC3B;AACA,QAAQ,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO;AAC9C,QAAQ,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC1B,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE;AACrB,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1C,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC;AACxD,gBAAgB,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC;AACzD,gBAAgB,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC;AAC5F,gBAAgB,KAAK,CAAC;AACtB,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE;AACrB,gBAAgB,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC;AACrF,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC;AAC1D,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC;AAC7F,gBAAgB,KAAK,CAAC;AACtB,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE;AACrB,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC;AAC3F,gBAAgB,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC/B,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC;AACrF,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAA
C,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC;AAC7D,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC;AAC7F,gBAAgB,KAAK,CAAC;AACtB,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE;AACrB,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1C,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC;AACtD,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC;AAC7D,gBAAgB,EAAE,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC;AAC7F,gBAAgB,KAAK,CAAC;AACtB,YAAY,IAAI,CAAC,CAAC,MAAM,EAAE;AAC1B,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,KAAK,CAAC;AACtB,YAAY,OAAO,CAAC;AACpB,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACxB,gBAAgB,KAAK,CAAC;AACtB,QAAQ,CAAC;AACT;AACA,QAAQ,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,EAAE;AAC5C,IAAI,EAAE;AACN;AACA,IAAI,EAAE;AACN,KAAK,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC;AACrF,KAAK,EAAE;AACP,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,QAAQ,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,GAAG;AACjC,gBAAgB,aAAa,CAAC,CAAC,CAAC,iBAAiB,CAAC,GAAG,EAAE;AACvD,gBAAgB,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC;AACrD,gBAAgB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;AAClD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU;AACrD,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACzB,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,SAAS,EAAE;AAChC,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,KAAK,CAAC,SAAS,CAAC;AACrC,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAChC,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACzC,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,EAAE,SAAS,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK;AACtF,gBAAgB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG;AACzG,gBAAgB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG;AACvG,gBAAgB,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,aAAa,CAAC,CAAC,aAAa,EAAE;AAC/F,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D;AACA,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,WAAW;AAC9D,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACvD,oBAAoB,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAClG,oBAAoB,CAAC,UAAU,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC
;AAC3D,wBAAwB,MAAM,CAAC,qBAAqB,CAAC;AACrD,oBAAoB,EAAE,CAAC,CAAC,SAAS,EAAE;AACnC,oBAAoB,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,SAAS,CAAC;AAC9F,oBAAoB,CAAC,UAAU,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnE,wBAAwB,MAAM,CAAC,qBAAqB,CAAC;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,KAAK,GAAG,EAAE,CAAC,SAAS,EAAE,CAAC,aAAa,CAAC;AAC1D,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACzC,YAAY,CAAC;AACb;AACA,YAAY,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACrC,YAAY,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,QAAQ,GAAG;AACX,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC;AAClE,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AAC5B,QAAQ,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC;AAC1C,YAAY,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC;AACjE;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3B,YAAY,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE;AACzE;AACA,YAAY,OAAO,CAAC,KAAK,GAAG,MAAM,EAAE,GAAG,EAAE;AACzC,mBAAmB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,GAAG;AACpF,mBAAmB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;AAClC,mBAAmB,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;AACpD,mBAAmB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;AACvC,mBAAmB,CAAC,KAAK,EAAE,QAAQ,EAAE,CAAC,CAAC,KAAK,EAAE;AAC9C,mBAAmB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,CAAC;AACvF,mBAAmB,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,EAAE;AACxD;AACA,YAAY,OAAO,CAAC,IAAI,GAAG,MAAM,EAAE;AACnC,QAAQ,CAAC;AACT,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC;AACrC,IAAI,QAAQ,CAAC,SAAS,EAAE,CAAC,CAAC;AAC1B,QAAQ,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,MAAM,CAAC;AAC7B,QAAQ,EAAE,CAAC,EAAE,gBAAgB,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC;AAC5C;AACA,QAAQ,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AAClC,YAAY,WAAW,GAAG;AAC1B,YAAY,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC;AACrD,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,gBAAgB,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI;AACpG,YAAY,EAAE,SAAS,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC;AAC3E,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE;AACpD,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC7B,gBAAgB,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC;AACtD,YAAY,CAAC;AACb;AACA,YAAY,eAAe,GAAG;AAC9B,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM,CAAC,SAAS,CAAC;AACzB,IAAI,CAAC;AACL;AACA,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC,CAAC,oBAAoB,CAAC;AAC1D,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,SAAS,EAAE;AAC7D;AACA,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AAC5C,QAAQ,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO;AACpC,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACrF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAA
C,CAAC,IAAI;AACrF,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,IAAI;AAC7G,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AACvG,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,IAAI;AAC1G,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACpG,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACrF;AACA,QAAQ,EAAE,CAAC,UAAU,CAAC,OAAO;AAC7B,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClF,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,cAAc,EAAE,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC3E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,QAAQ,EAAE,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrE,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC9E,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,OAAO,GAAG;AAC5E,QAAQ,GAAG;AACX;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK;AACnC,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC7B,gBAAgB,SAAS,GAAG;AAC5B,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC/D,YAAY,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI;AAC3E,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC1B,gBAAgB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,gBAAgB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG;AAC1C,gBAAgB,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACjE,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,QAAQ,GAAG;AACX;AACA,QAAQ,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU;AAC/B,QAAQ,IAAI,CAAC,CAAC,CAAC,
GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,KAAK;AAC5E,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI;AAC7D,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,SAAS,EAAE;AACpC,IAAI,MAAM,CAAC,SAAS,CAAC;AACrB,EAAE;ACrWF;AACA;AACA,EAAE;AACF,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI;AAC5B;AACA,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAC/C,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAClC,IAAI,EAAE,CAAC,IAAI,CAAC,QAAQ;AACpB,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE;AACzC;AACA,IAAI,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG;AAC/B,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;AAClD,QAAQ,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC;AACvC,QAAQ,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC;AACzC,QAAQ,MAAM,CAAC,CAAC,IAAI,EAAE;AACtB,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAC5C,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAU,GAAG,UAAU,CAAC,CAAC,EAAE;AAC5C,QAAQ,QAAQ,CAAC,eAAe,CAAC,EAAE;AACnC,QAAQ,QAAQ,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AAChD;AACA,QAAQ,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,WAAW,CAAC;AAC1D,QAAQ,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,CAAC,YAAY,CAAC;AAC5D,QAAQ,MAAM,CAAC,CAAC,IAAI,EAAE;AACtB,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI;AAC/B,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACrD,QAAQ,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,CAAC;AAC/C,QAAQ,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC;AACjD,QAAQ,MAAM,CAAC,CAAC,IAAI,EAAE;AACtB,IAAI,CAAC;AACL;AACA,IAAI,MAAM,CAAC,CAAC,IAAI,EAAE;AAClB,EAAE;AACF;AACA;AACA,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,EAAE,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM;AACrE,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC;AACvD,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;AACjC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE;AAC/C,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,IAAI,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE;AACnC,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,IAAI,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,IAAI,GAAG;AAChD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,IAAI,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE;AAC9C,EAAE;AACF;AACA;AACA,EAAE;AACF,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO;AACrD,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AAC3C,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC;AAClC,QAAQ,MAAM,CAAC,gBAAgB,EAAE,MAAM,EAAE,CAAC,OAAO,EAAE;AACnD,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;AACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE;AACzE,IAAI,CAAC;AACL,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,KA
AK,CAAC,QAAQ,CAAC;AAC7E,IAAI,MAAM,CAAC,CAAC;AACZ,QAAQ,QAAQ,CAAC,CAAC,OAAO,CAAC;AAC1B,QAAQ,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC3B,YAAY,MAAM,CAAC,mBAAmB,EAAE,MAAM,EAAE,CAAC,OAAO,EAAE;AAC1D,QAAQ,CAAC;AACT,IAAI,CAAC;AACL,EAAE;AACF;AACA;AACA,EAAE;AACF,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC;AACtE,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK;AAClD,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI;AAC1E,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,aAAa;AACtE,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACrC,IAAI,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI;AACrD,IAAI,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC9B,QAAQ,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,GAAG;AACvC;AACA,IAAI,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AACpD,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxC,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,KAAK,EAAE;AAC1D,QAAQ,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE;AAC/C,QAAQ,EAAE;AACV;AACA,IAAI,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE;AACrE,IAAI,EAAE,QAAQ,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI;AAC/D,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;AACZ,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK;AACjE,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;AAC7D,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM;AACtE,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACpC,IAAI,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC;AACrE,IAAI,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,IAAI;AAC5D,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG;AAC9E,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,UAAU;AACnD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AACpE,IAAI,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS;AACpD,IAAI,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;AAC9D,IAAI,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,GAAG;AACnE;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC;AAClE,IAAI,GAAG,CAAC,QAAQ,CAAC,C
AAC,CAAC,aAAa,CAAC,MAAM,CAAC;AACxC;AACA,IAAI,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE;AACjC,QAAQ,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,UAAU,CAAC,GAAG,GAAG,CAAC,CAAC;AACnD,YAAY,MAAM,CAAC,UAAU,CAAC,GAAG,IAAI;AACrC,QAAQ,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACnD,YAAY,MAAM,CAAC,UAAU,CAAC,GAAG,EAAE;AACnC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC;AAChB,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK;AAC1D,YAAY,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAC5B,gBAAgB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,IAAI;AAC1D,gBAAgB,QAAQ,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC;AAChD,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,YAAY,MAAM,CAAC,aAAa,CAAC,QAAQ,EAAE;AAC3C,QAAQ,CAAC;AACT,IAAI,EAAE;AACN,EAAE;AACF;AACA;AACA,EAAE;AACF,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM;AAC3E,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC1E,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM;AAClD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC1C;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1C,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,IAAI,GAAG;AACnD,YAAY,MAAM,CAAC,UAAU,CAAC,YAAY,CAAC;AAC3C,gBAAgB,EAAE,CAAC,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,OAAO,EAAE,IAAI,GAAG;AAC3D,gBAAgB,MAAM,EAAE;AACxB,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC1C,QAAQ,GAAG;AACX,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChD,QAAQ,OAAO,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;AAClE,QAAQ,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE;AACxB,QAAQ,EAAE,CAAC,KAAK,CAAC,cAAc,GAAG;AAClC,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjD,QAAQ,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,YAAY,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AACjC,QAAQ,CAAC;AACT,IAAI,GAAG;AACP,EAAE;AACF;AACA;AACA,EAAE;AACF,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC;AAC5E,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC;AACzE,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,eAAe;AACtD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACvD,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,CAAC;AAC1F,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,GAAG,OAAO,EAAE,EAAE,MAAM,CAAC,EAAE,EAAE;AACrF,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,GAAG,MAAM,CAAC;AACnD,QAAQ,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/D,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,CAAC,CAAC;AACb,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,I
AAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC;AAC/D,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC7B,QAAQ,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACnB,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI;AACrB,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ;AACzB,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC7B;AACA,QAAQ,MAAM,CAAC,CAAC,CAAC;AACjB,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,CAAC,CAAC;AACb,EAAE;AACF;AACA,EAAE;AACF,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE;AAC/C,EAAE;AACF,EAAE,CAAC,SAAS,CAAC,SAAS,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,EAAE;AAC/D,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,IAAI,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG;AAC1D,IAAI,MAAM,CAAC,WAAW,CAAC,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,EAAE;AAC3D,EAAE;AACF;AACA;AACA,EAAE;AACF,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,SAAS;AACrD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AACrD,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC;AAClD,QAAQ,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC5D,IAAI,CAAC;AACL;AACA,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;AAC5D,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG;AACzB,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACpB;AACA,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACpC,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;AAC7C,QAAQ,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,EAAE;AACvC,YAAY,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC;AACrC,YAAY,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzB,gBAAgB,CAAC,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,GAAG,EAAE;AACzD,oBAAoB,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AACxC,oBAAoB,IAAI,CAAC,SAAS,EAAE,KAAK,GAAG;AAC5C,gBAAgB,GAAG;AACnB,YAAY,GAAG,KAAK,EAAE;AACtB;AACA,YAAY,EAAE,CAAC,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACxC,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,IAAI,MAAM,CAAC,IAAI,CAAC;AAChB,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACrC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACrC,YAAY,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC;AACjC,QAAQ,CAAC;AACT,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG;AACzB,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC3D,QAAQ,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACvE;AACA,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,YAAY,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,GAAG;AAClC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC;AAChB,YAAY,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC;AACjE,QAAQ,CAAC;AACT,QAAQ,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC;AACrC;AACA,QAAQ,EAAE,CAAC,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,YAAY,WAAW,CAAC,IAAI,CAAC,SAAS,EAAE;AACxC,QAAQ,CAAC;AACT;AACA,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,YAAY,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AACxC,YAAY,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE;AAC1D,YAAY,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAA
I,CAAC,CAAC,EAAE;AAC7D,YAAY,MAAM,CAAC,SAAS,CAAC;AAC7B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC;AAChB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,gBAAgB,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5C,YAAY,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC7E,gBAAgB,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5C,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7C,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,MAAM,CAAC,SAAS;AAC5B,gBAAgB,CAAC,UAAU,EAAE;AAC7B,gBAAgB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACnC,gBAAgB,CAAC,IAAI,CAAC,QAAQ,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,oBAAoB,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,wBAAwB,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AACpD,wBAAwB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE;AACzD,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,QAAQ,EAAE,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACvE,YAAY,WAAW,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG;AACvE,YAAY,QAAQ,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE;AACtD,QAAQ,CAAC;AACT,IAAI,CAAC;AACL;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;AACjE,OAAO,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC5D,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AACpC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC1E,IAAI,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACtC,QAAQ,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AACjC,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,GAAG;AACrD,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,GAAG;AACvD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG;AACxD;AACA,YAAY,EAAE,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AACjD,gBAAgB,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,GAAG;AAC3D,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AACvC,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,IAAI,GAAG;AACP,EAAE;AACF;AACA;AACA,EAAE;AACF,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AAC3D,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,GAAG;AAC5B,IAAI,EAAE,CAAC,GAAG,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC;AAC5C,QAAQ,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG;AACpC,IAAI,CAAC;AACL,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACnB,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACrB,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,KAAK;AACjC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE;AAC7C,IAAI,GAAG,CAAC,IAAI,CAAC
,CAAC,CAAC,IAAI,CAAC;AACpB,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACvB;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,CAAC,CAAC,GAAG,GAAG;AACjD;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,KAAK,EAAE;AAC5C,QAAQ,SAAS,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE;AAC/B,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,EAAE;AAC/B,QAAQ,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;AACvB,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1C,QAAQ,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AACxB,YAAY,QAAQ,CAAC,CAAC,CAAC,QAAQ,KAAK;AACpC,QAAQ,CAAC;AACT,QAAQ,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,EAAE;AACtB,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACzB,gBAAgB,QAAQ,GAAG;AAC3B,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,EAAE;AAChC,QAAQ,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG;AAC1B,QAAQ,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE;AACzC,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,GAAG;AAC1B,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,GAAG;AACnC;AACA,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,CAAC;AACjE,YAAY,MAAM,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC;AACT;AACA,QAAQ,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACnC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,gBAAgB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG;AAChC,YAAY,CAAC;AACb,YAAY,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AACvC,YAAY,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AAC3B,QAAQ,CAAC;AACT,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB,IAAI,EAAE;AACN;AACA,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,GAAG;AAC7B,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACnB,YAAY,SAAS,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE;AACnC,YAAY,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACxB,QAAQ,CAAC;AACT,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;AAC9B,YAAY,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE;AACxC,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;AAC/E,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AACf,KAAK,CAAC,OAAO,EAAE;AACf,EAAE,SAAS,CAAC,CAAC,IAAI,CAAC;AAClB,EAAE,QAAQ,CAAC,CAAC,IAAI;AAChB,GAAG;AACH;AACA,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;AACvB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACjD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACf,QAAQ,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACnD,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC;AACjD,gBAAgB,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE;AACjC,YAAY,CAAC;AACb,QAAQ,GAAG,IAAI,CAAC,IAAI,GAAG;AACvB,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,IAAI,CAAC;AAChB,EAAE;AACF;AACA;AACA,EAAE;AACF,QAAQ,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK;AACpC,IAAI,CAAC,EAAE,GAAG,CAAC,KAAK,CAAC,IAAI;AACrB;AACA,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI;AAC1E,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC5C,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAChD,IAAI,EAAE,CAAC,IAAI,CA
AC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO;AACtD,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACd,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/E,QAAQ,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;AACpE,IAAI,CAAC;AACL,IAAI,EAAE,CAAC,GAAG,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AACpD,IAAI,EAAE,CAAC,GAAG,EAAE,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AACvD,IAAI,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU;AACzE,IAAI,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1E,IAAI,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI;AAC1C,IAAI,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC3C,IAAI,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO;AAChC,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;AACpC,IAAI,EAAE,CAAC,GAAG,EAAE,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AACpD,IAAI,MAAM,CAAC,QAAQ,CAAC;AACpB,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI;AACtE,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAChD,IAAI,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK;AAC3E,IAAI,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,IAAI,EAAE;AAC/C,EAAE;AACF;AACA;AACA,EAAE;AACF,GAAG,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK;AACzD,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK;AAC1E,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG;AAC/E;AACA,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,OAAO;AACjE,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC;AACtD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7C,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG;AACzE,IAAI,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC;AAC7C,QAAQ,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;AACZ,QAAQ,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,YAAY,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE;AAC/D,YAAY,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1C,YAAY,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,MAAM,CAAC,KAAK,CAAC;AACzB,QAAQ,EAAE;AACV,QAAQ,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC
,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO;AAC7E,QAAQ,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ;AAC1E,QAAQ,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE;AAC/D,YAAY,EAAE,CAAC,EAAE,KAAK,CAAC,UAAU,CAAC,IAAI,EAAE,CAAC,CAAC;AAC1C,gBAAgB,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC;AACT,IAAI,CAAC;AACL,EAAE;AACF;AACA;AACA,EAAE;AACF,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AACjD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACxC,IAAI,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,GAAG;AAC9C,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI;AAC/D,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI;AAC/D,IAAI,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,EAAE;AAC5B,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AACxB,QAAQ,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG;AAC3C,IAAI,CAAC;AACL,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM;AAChC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM;AACzE,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,CAAC,WAAW;AAC7F,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACjE,IAAI,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,IAAI;AAC/D,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,EAAE;AAC9B,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,EAAE;AAC3B,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE;AAClC,EAAE;AACF;AACA;AACA,EAAE;AACF,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK;AAC/B,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAChD,QAAQ,MAAM,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1C,IAAI,GAAG;AACP,EAAE;AACF;AACA;AACA,EAAE;AACF,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACxE,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACtD,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,QAAQ;AAC3E,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,GAAG;AAC9B;AACA;AACA,EAAE;AACF,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG;AACrE,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC9B,IAAI,GAAG,CAAC,IAAI,CAAC;AACb,QAAQ,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;AAClB,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACpC,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACpC,QAAQ,EAAE,CAAC
,CAAC,EAAE,CAAC,GAAG,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,YAAY,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AACrD,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC;AAChB,YAAY,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE;AAChD,QAAQ,CAAC;AACT,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3C,QAAQ,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE;AAC7B,QAAQ,MAAM,CAAC,MAAM,CAAC;AACtB,IAAI,EAAE;AACN,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3C,QAAQ,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE;AAC7B,QAAQ,MAAM,CAAC,MAAM,CAAC;AACtB,IAAI,EAAE;AACN,IAAI,MAAM,CAAC,MAAM,CAAC;AAClB,EAAE;AACF;AACA;AACA,EAAE;AACF,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM;AAC5D,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM;AACzE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO;AACxE,GAAG,CAAC,aAAa,CAAC,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,WAAW;AAC7D,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACpD,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM;AAC7B,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI;AAChE,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,mBAAmB,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI;AAChE,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,GAAG;AAC5C,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,GAAG;AACxC,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,EAAE;AACjE,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;AACzB,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;AACzB,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE;AAChC,IAAI,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU;AACzE,IAAI,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK;AAC9H,IAAI,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK;AACpF,EAAE;AACF;AACA;AACA,EAAE;AACF,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM;AAC9D,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAClC,IAAI,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,GAAG;AACnC,EAAE;AACF;AACA;AACA,EAAE;AACF,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM,CAAC;AACtD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,IAAI,MAAM,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACtE,EAAE;AACF;AACA;AACA,EAAE;AACF,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC;AACrD,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AACrD,IAAI,MAAM,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACpE,EAAE;AACF;AACA;AACA,EAAE;AACF,S
AAS,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3C,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAChE,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE;AAC/F,EAAE;AACF;AACA,EAAE;AACF,SAAS,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC9D,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE;AAC7F,EAAE;AACF;AACA,EAAE;AACF,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO;AAC1E,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC9C,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,GAAG;AAC9B,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG;AAC9B,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG;AAC9B,QAAQ,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AACpE,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnE,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACjE,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAClC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAClC;AACA,IAAI,EAAE,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,UAAU;AACpD,IAAI,SAAS,CAAC,SAAS,EAAE,CAAC,GAAG,MAAM,GAAG;AACtC;AACA,IAAI,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,IAAI,CAAC,IAAI,EAAE;AAClE;AACA,IAAI,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACrC,QAAQ,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE;AACxC,QAAQ,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,GAAG,EAAE;AAC5B,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG;AACxC;AACA,IAAI,UAAU;AACd,QAAQ,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AACxC,EAAE;AACF;AACA,EAAE;AACF,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;AAClB,CAAC,EAAE;AACH,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7C,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC1B,QAAQ,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,YAAY,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,GAAG,KAAK,GAAG,CAAC,IAAI,OAAO,GAAG;AACvD,YAAY,IAAI,CAAC;AACjB,YAAY,IAAI,CAAC,CAAC,CAAC,GAAG;AACtB,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG;AAC/B,YAAY,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,IAAI;AAC7C,YAAY,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG;AACpG,QAAQ,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC;AACpC,YAAY,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE;AAC5B,YAAY,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,IAAI;AACvC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG,qBAAqB,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC/D,gBAAgB,IAAI,CAAC,GAAG,GAAG;AAC3B,gBAAgB,KA
AK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,IAAI;AAC3C,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AAC9B,gBAAgB,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,IAAI,EAAE;AACpI,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,IAAI,GAAG;AACP,EAAE;AACF;AACA,EAAE;AACF,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK;AACzB,EAAE;AACF,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAClD,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC;AAC1B,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB;AACA,IAAI,EAAE,CAAC,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC;AAC3B,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI;AAC/C,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC;AACvC,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB;AACA,IAAI,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnB,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM;AACzC,QAAQ,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;AACvE,YAAY,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM;AAC7C,YAAY,EAAE,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG;AACtD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5C,YAAY,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7F,YAAY,MAAM,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC;AACT,IAAI,CAAC;AACL,IAAI,MAAM,CAAC,IAAI,CAAC;AAChB,EAAE;ACrsBF,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC7B,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,GAAG;AAC7B,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG;AAClC;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,SAAS;AACxD,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,SAAS;AACzD,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI;AAC9B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI;AAC5F,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK;AAC3B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS;AAC9B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE;AAC7C,QAAQ,CAAC;AACT,IAAI,IAAI;AACR,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC;AACrB,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE;AACzB,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AAC7C,IAAI,CAAC;AACL;AACA,I
AAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC;AACf,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,EAAE,IAAI,GAAG;AAC7E,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,GAAG;AAC3F,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC;AAC/B,gBAAgB,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,YAAY,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE;AACzE,gBAAgB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE;AAChF;AACA,YAAY,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS;AAC1I,YAAY,CAAC,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE;AAC9D;AACA,YAAY,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,GAAG;AAC5C;AACA,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG;AACxC,YAAY,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9B,gBAAgB,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,GAAG;AAC1C,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,SAAS,EAAE;AAC5D,gBAAgB,CAAC,IAAI,EAAE,aAAa,CAAC,EAAE,CAAC,IAAI,GAAG;AAC/C,YAAY,SAAS,CAAC,IAAI,GAAG,MAAM,GAAG;AACtC;AACA,YAAY,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM;AACnG,YAAY,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACzC,gBAAgB,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG,MAAM,EAAE,IAAI,GAAG,KAAK,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,EAAE;AAC7E,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,YAAY,CAAC;AAC7B,YAAY,GAAG,CAAC,UAAU,CAAC;AAC3B,YAAY,GAAG,CAAC,CAAC,CAAC;AAClB,YAAY,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;AACpC,gBAAgB,IAAI,CAAC,CAAC,GAAG,EAAE;AAC3B,oBAAoB,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACnF,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxB,kBAAkB,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,oBAAoB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACjF,kBAAkB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,oBAAoB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE;AAC/G,kBAAkB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxD
,oBAAoB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,GAAG;AAClG,kBAAkB,EAAE;AACpB,oBAAoB,SAAS;AAC7B,wBAAwB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AACtD,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACrC,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,oBAAoB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACrC,wBAAwB,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AACtE,4BAA4B,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI;AAClD,wBAAwB,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gCAAgC,MAAM,CAAC,EAAE,EAAE,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,EAAE;AAC7H,wBAAwB,GAAG,MAAM,EAAE,IAAI,GAAG;AAC1C,wBAAwB,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACnD,wBAAwB,UAAU;AAClC,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1F,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI,EAAE;AAC3C,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE;AACjD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG;AAC3D,4BAA4B,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AAC1D,4BAA4B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC/C,gCAAgC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,4BAA4B,GAAG;AAC/B,wBAAwB,UAAU,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE;AAC9E,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAClG,4BAA4B,GAAG;AAC/B,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,MAAM,EAAE;AAC9B,oBAAoB,YAAY,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,EAAE,CAAC;AAC1D,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC;AAC1C,oBAAoB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG,MAAM,EAAE,IAAI,GAAG;AACjE,oBAAoB,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG;AAC9C,oBAAoB,EAAE,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3C,wBAAwB,EAAE,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU,CAAC,SAAS;AAC5F,wBAAwB,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,CAAC;AACtD,wBAAwB,EAAE,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK;AAC3D,wBAAwB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,4BAA4B,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,qBAAqB,GAAG;AACnE,4BAA4B,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AAClD,4BAA4B,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC;AACpD,4BAA4B,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1E,wBAAwB,GAAG;AAC3B,wBAAwB,gBAAgB,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI;AACxH,wBAAwB,EAAE,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC;AAC1G,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,GAAG;AAC/E,wBAAwB,YAAY,CAAC,CAAC
,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,YAAY,EAAE,EAAE,CAAC;AAClF,wBAAwB,EAAE,MAAM,CAAC,GAAG,CAAC,MAAM;AAC3C,wBAAwB,MAAM;AAC9B,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,gBAAgB,CAAC;AAChE,4BAA4B,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AAC1F,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AAC5C,4BAA4B,MAAM;AAClC,gCAAgC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClE,oCAAoC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG;AAC3F,gCAAgC,GAAG;AACnC,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,IAAI;AACvE,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oBAAoB,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACnF,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACrF,oBAAoB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,wBAAwB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE;AACnH,oBAAoB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1D,wBAAwB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,GAAG;AACtG,oBAAoB,EAAE;AACtB,oBAAoB,SAAS;AAC7B,wBAAwB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AACtD,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC;AAChD,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,oBAAoB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACrC,wBAAwB,EAAE,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AACzD,wBAAwB,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AACtE,4BAA4B,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG;AACnD,4BAA4B,CAAC,IAAI,EAAE,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI;AAClG,wBAAwB,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gCAAgC,MAAM,CAAC,EAAE,EAAE,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,EAAE;AAC7H,wBAAwB,GAAG,MAAM,EAAE,IAAI,GAAG;AAC1C,wBAAwB,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACnD,wBAAwB,UAAU;AAClC,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACtI,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI,EAAE;AAC3C,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AAChD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,WAAW,GAAG;AAC1D,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,gBAAgB,CAAC;AAChE,4BAA4B,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,
CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AACrH,4BAA4B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC/C,gCAAgC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,4BAA4B,GAAG;AAC/B,wBAAwB,UAAU,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,EAAE;AACjF,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACtI,4BAA4B,GAAG;AAC/B,oBAAoB,CAAC;AACrB;AACA,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,KAAK,EAAE;AAC7B,oBAAoB,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACnF,oBAAoB,SAAS;AAC7B,wBAAwB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE;AAChF,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG;AAC5E,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK;AAC3N,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,IAAI;AACpG,oBAAoB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACrC,wBAAwB,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AACtE,4BAA4B,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI;AAClD,wBAAwB,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gCAAgC,MAAM,CAAC,EAAE,EAAE,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,EAAE;AAC7H,wBAAwB,GAAG,MAAM,EAAE,IAAI,EAAE;AACzC,4BAA4B,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACjD,wBAAwB,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACnD,wBAAwB,UAAU;AAClC,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAC1F,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI,EAAE;AAC3C,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AAChD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzC,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,WAAW,GAAG;AAC1D,4BAA4B,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE;AAC1D,4BAA4B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC/C,gCAAgC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,4BAA4B,GAAG;AAC/B,wBAAwB,UAAU,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,EAAE;AAChF,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAClG,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI
,EAAE;AAC3C,4BAA4B,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACjD,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,IAAI,EAAE;AAC5B,oBAAoB,EAAE;AACtB,qBAAqB,EAAE,GAAG,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO;AAC/G,qBAAqB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG,MAAM,EAAE,IAAI,GAAG;AAClE,qBAAqB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,qBAAqB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,qBAAqB,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACrG,qBAAqB,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,YAAY,CAAC;AACnE,qBAAqB,GAAG;AACxB,qBAAqB,EAAE;AACvB,oBAAoB,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACnF,oBAAoB,SAAS;AAC7B,wBAAwB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE;AAC9E,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG;AAC7E,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AACxH,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,IAAI;AACtG,oBAAoB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACrC,wBAAwB,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AACtE,4BAA4B,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI;AAClD,wBAAwB,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gCAAgC,MAAM,CAAC,EAAE,EAAE,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,EAAE;AAC7H,wBAAwB,GAAG,MAAM,EAAE,IAAI,EAAE;AACzC,4BAA4B,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACjD,wBAAwB,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACnD,wBAAwB,UAAU;AAClC,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAC3F,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI,EAAE;AAC3C,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AAChD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzC,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG;AAC3D,4BAA4B,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,EAAE;AACvD,4BAA4B,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC/C,gCAAgC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,4BAA4B,GAAG;AAC/B,wBAAwB,UAAU,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,EAAE;AAChF,4BAA4B,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAClG,4BAA4B,EAAE;AAC9B,4BAA4B,CAAC,MAAM,EAAE,IAAI,EAAE;AAC3C,4BAA4B,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACjD,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC;AAC1B,YAAY,CAAC;AACb,YAAY,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACrD;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,IA
AI,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC;AACxF,gBAAgB,EAAE,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO;AAChG,gBAAgB,CAAC,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI;AAC9D,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AAC1E,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;AAC/J,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,KAAK;AACnK,gCAAgC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACnE;AACA,4BAA4B,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;AAC9G,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG;AAC9D,gBAAgB,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACvF,oBAAoB,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,KAAK,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxF,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AACxF,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG;AACrC,gBAAgB,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AACjD,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,wBAAwB,GAAG,CAAC,CAAC;AAC7B,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ;AACzD,gCAAgC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,qBAAqB,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC;AAC1M,4BAA4B,IAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ;AACtD,gCAAgC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,qBAAqB,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACnG,wBAAwB,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACtC,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ;AACzD,gCAAgC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,C
AAC,IAAI,CAAC;AACrK,4BAA4B,IAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ;AACtD,gCAAgC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/D,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB,gBAAgB,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI;AAC7C,gBAAgB,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,CAAC;AACjF,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,KAAK;AAC/J,4BAA4B,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG;AACrD,wBAAwB,IAAI;AAC5B,4BAA4B,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,MAAM,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;AACpG,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI;AACtC,YAAY,CAAC,CAAC,SAAS,GAAG,IAAI,EAAE;AAChC,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,EAAE;AACtB,oBAAoB,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC;AAC1E,oBAAoB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC;AACvE,oBAAoB,GAAG,CAAC,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC;AAC/D,oBAAoB,EAAE;AACtB,oBAAoB,MAAM,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC;AAC7F,gBAAgB,EAAE,CAAC;AACnB,gBAAgB,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE;AACvC,YAAY;AACZ,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG;AAClC;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,GAAG;AAChD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,iBAAiB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,iBAAiB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,iBAAiB,CAAC,CAAC,IAAI;AAChH,QAAQ,aAAa,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACxG,QAAQ,YAAY,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACtG,QAAQ,YAAY,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACtG,QAAQ,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAClG,QAAQ,SAAS,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACxG,QAAQ,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,QAA
Q,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAC1F,QAAQ,KAAK,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACxF,QAAQ,KAAK,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACxF,QAAQ,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AAC9F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC;AACvB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE;AAC9B,YAAY,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE;AAC/D,YAAY,EAAE,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,UAAU,IAAI;AACpG,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,EAAE,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,UAAU,EAAE,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,UAAU,IAAI;AAC/H,IAAI,EAAE,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,UAAU,IAAI;AAC5F;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACvYF,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;AACxD,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC;AACpB,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC;AACrB,QAAQ,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,GAAG;AACpC,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG;AACnC,QAAQ,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC;AAChF,QAAQ,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE;AACnD,QAAQ,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EA
AE,CAAC,EAAE;AACnD,QAAQ,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE;AACnD,QAAQ,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE;AAC5D,QAAQ,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE;AAC7D,QAAQ,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AAClD,QAAQ,UAAU,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE;AAC/D,QAAQ,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACpD,QAAQ,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACpD,QAAQ,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE;AAC5D,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,GAAG;AACxC,QAAQ,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,QAAQ,OAAO,CAAC,CAAC,MAAM,CAAC;AACxB,QAAQ,OAAO,CAAC,CAAC,MAAM,CAAC;AACxB,QAAQ,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,GAAG;AACvG,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;AACvB,QAAQ,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;AAC3B;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC;AACzB,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACnF,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAChE;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS;AACvD,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AAC1B,YAAY,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;AAC3B,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC;AACzE,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC;AAC5C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AACnF,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE;AAChD,oBAAoB,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAClC,wBAAwB,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,4BAA4B,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG;AACrE,wBAAwB,GAAG;AAC3B,oBAAoB,CAAC;AACrB,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAA
C,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AAC/C,gBAAgB,GAAG;AACnB,gBAAgB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE;AACtC,gBAAgB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE;AACtC,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,CAAC;AACb;AACA,YAAY,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE;AAC5C,YAAY,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACzD;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK;AAC5C,YAAY,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC;AACxC,YAAY,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,KAAK,EAAE,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAC5E;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,EAAE,IAAI,GAAG;AACrE,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,GAAG;AACnF,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,GAAG,EAAE,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACxF,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAClH,YAAY,QAAQ;AACpB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE;AAC5C,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE;AACvI,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG;AAClE,YAAY,QAAQ;AACpB,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE;AACrE,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC5C,gBAAgB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AAC3E,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG;AACnG,gBAAgB,GAAG;AACnB,YAAY,QAAQ,CAAC,IAAI,GAAG,MAAM,GAAG;AACrC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK;AAChE;AACA,YAAY,EAAE,CAAC,aAAa,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK;AACjD,YAAY,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9D,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACjE,wBAAwB,GAAG,CAAC,MAAM,EAAE,IAAI,E
AAE;AAC1C,0BAA0B,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrE,0BAA0B,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE;AACjF,wBAAwB,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE;AAC1C,0BAA0B,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrE,0BAA0B,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE;AAC9E,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAC9I,YAAY,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,EAAE;AAC7F,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,EAAE;AAC7F;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK;AAC7C,YAAY,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACzD,gBAAgB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7D,gBAAgB,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AAC5E,kBAAkB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE;AACvE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC3D,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC3D,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,GAAG;AAC/E,gBAAgB,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AACzE,kBAAkB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE;AACvE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC;AAC1C,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,SAAS,CAAC,CAAC;AAC3C,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AACxE,YAAY,GAAG;AACf;AACA,YAAY,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACzD,gBAAgB,QAAQ,CAAC,SAAS,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AACxD,kBAAkB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,sBAAsB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC7D,sBAAsB,QAAQ,CAAC,gBAAgB,EAAE;AACjD,0BAA0B,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CA
AC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,0BAA0B,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACrC,sBAAsB,GAAG;AACzB,kBAAkB,EAAE;AACpB,kBAAkB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,sBAAsB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC9D,sBAAsB,QAAQ,CAAC,eAAe,EAAE;AAChD,0BAA0B,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,0BAA0B,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACrC,sBAAsB,GAAG;AACzB,kBAAkB,EAAE;AACpB,kBAAkB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,sBAAsB,QAAQ,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC/D,kBAAkB,GAAG;AACrB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,KAAK;AACpB,YAAY,QAAQ,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;AAChD,gBAAgB,EAAE,CAAC,OAAO,CAAC,MAAM;AACjC,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AACrC,wBAAwB,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AACvC,wBAAwB,MAAM,CAAC,CAAC,CAAC;AACjC,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5F,wBAAwB,EAAE;AAC1B,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACnC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AACrC,wBAAwB,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AACvC,wBAAwB,MAAM,CAAC,CAAC,CAAC;AACjC,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5F,wBAAwB,EAAE;AAC1B,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACnC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC7D,gBAAgB,GAAG;AACnB;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,WAAW;AAC9B,YAAY,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE;AAClD,cAAc,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE;AAChE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE
;AACtE,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,SAAS,CAAC;AACzC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC;AACrC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;AAC5G,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrF;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,IAAI;AAC1B,YAAY,QAAQ,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,GAAG;AACvE;AACA,YAAY,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,EAAE;AACrD,cAAc,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE;AACxE,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC;AACrC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE;AACvE,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,SAAS,CAAC;AACtC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG;AACxE;AACA,YAAY,EAAE,CAAC,QAAQ;AACvB,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,GAAG,EAAE,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvF,gBAAgB,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC3C,YAAY,GAAG;AACf,YAAY,QAAQ,CAAC,KAAK,GAAG,MAAM,EAAE,MAAM,EAAE;AAC7C,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1F,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5F,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC;AACvC,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnG,wBAAwB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACnC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnG,wBAAwB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK;AACnC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC7D,gBAAgB,GAAG;AACnB,YAAY,QAAQ,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,GAAG;AACzD,YAAY,QAAQ;AACpB,cAAc,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CA
AC,CAAC,CAAC,CAAC,EAAE,CAAC;AACtD,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAClF,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAChC,YAAY,QAAQ,CAAC,IAAI,GAAG,MAAM,GAAG;AACrC;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AACpC,YAAY,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AACpC,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,EAAE,CAAC,OAAO,CAAC,SAAS,GAAG;AACtD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACpF,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F,QAAQ,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAChF,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzE,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzE,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzE,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzE,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzE,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACzF,QAAQ,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,
MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC5E,QAAQ,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,CAAC,CAAC,CAAC,CAAC;AACZ,YAAY,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC7B,gBAAgB,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,CAAC,OAAO,IAAI;AAC9G,gBAAgB,MAAM,CAAC,GAAG;AAC1B,YAAY,EAAE;AACd,YAAY,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,CAAC,OAAO,IAAI;AAC9G,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACpUF,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACtC,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG;AACjC,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG;AACjC;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE;AAC5D,QAAQ,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACrB,QAAQ,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG;AACpC,QAAQ,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,QAAQ,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,QAAQ,eAAe,CAAC,CAAC,CAAC,KAAK,CAAC;AAChC,QAAQ,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC;AAC9B,QAAQ,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACtC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACb,QAAQ,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,GAAG;AACtC,QAAQ,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,CAAC,CAAC,SAAS,GAAG;AAC5D,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;AACvB;AACA,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE;AA
[base64 VLQ source-map "mappings" data for the bundled minified JavaScript static assets under hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/; not human-readable]
AAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,KAAK;AACzC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACzB,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,EAAE;AACzB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;AACpD,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AAC3E,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,GAAG;AAClC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACtG,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACpB,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B;AACA,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC1C,IAAI,KAAK,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACvD;AACA,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC9B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC;AAChE,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AACrE,gBAAgB,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC/B,gBAAgB,QAAQ,CAAC,CAAC,QAAQ;AAClC,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC;AAC1C,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC;AACtC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC;AAC7C,gBAAgB,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;AAC1C,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AAClC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SA
AS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,SAAS,CAAC,OAAO,EAAE,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE;AACtD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnC,oBAAoB,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE;AAC1C,gBAAgB,IAAI;AACpB,oBAAoB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;AACzE,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,gBAAgB;AACpD,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE;AAC9C,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,SAAS,CAAC;AAC3C,gBAAgB,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC;AACrC,gBAAgB,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE;AACxC;AACA;AACA,YAAY,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,gBAAgB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC;AAC1C,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAClD,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACrC,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG;AACzD,gBAAgB,UAAU,GAAG;AAC7B,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAgB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC;AAC1C,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,IAAI,GAAG;AAC7C;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACnE,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACtC,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MA
AM,GAAG;AAC/B;AACA,YAAY,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AAC5B,gBAAgB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI;AACxC,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE;AACzE,oBAAoB,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,wBAAwB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AAChF;AACA,wBAAwB,EAAE,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI;AACnF,wBAAwB,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;AAC7E;AACA,wBAAwB,MAAM,CAAC,CAAC;AAChC,gCAAgC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,GAAG;AAC/F,gCAAgC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE;AAC9F,wBAAwB,EAAE;AAC1B,oBAAoB,GAAG;AACvB;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,EAAE,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACvE,oBAAoB,EAAE,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtE,gBAAgB,EAAE;AAClB;AACA,gBAAgB,KAAK,CAAC,OAAO,CAAC,cAAc,EAAE;AAC9C,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE;AACpC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM;AAC3F,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,EAAE;AAC3C,gBAAgB,CAAC,KAAK,CAAC,IAAI,EAAE;AAC7B;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AAC/C;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,wBAAwB,CAAC,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE;AACtF,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,GAAG;AACvF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,cAAc,GAAG,MAAM,EAAE,CAAC,GAAG;AAC9G,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,KAAK,EAAE,OAAO,CAAC,MAAM,GAAG,IAAI,GAAG;AAC5F,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG,KAAK,EAAE,OAAO,CAAC,MAAM,EAAE,wBAAwB,EAAE;AAC9G,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG,KAAK,EAAE,OAAO,CAAC,MAAM,GAAG,IAAI,GAAG;AAC/F,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG;AAChE;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,EAAE;AAC7C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,
EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,QAAQ;AACvB,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,iBAAiB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,GAAG,SAAS,MAAM,MAAM,GAAG;AACtE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC;AACnE,gBAAgB,EAAE;AAClB;AACA,gBAAgB,QAAQ;AACxB,oBAAoB,CAAC,KAAK,CAAC,GAAG,CAAC;AAC/B,oBAAoB,CAAC,KAAK,IAAI,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,GAAG;AACpD,oBAAoB,CAAC,UAAU,CAAC,KAAK,CAAC;AACtC,oBAAoB,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE;AACpE,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,EAAE;AAC5C,oBAAoB,CAAC,KAAK,CAAC,YAAY,CAAC;AACxC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE;AACpC,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC;AACnD,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG;AAClF;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,YAAY,GAAG,MAAM,GAAG,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU;AACpF,YAAY,EAAE,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;AACtC,gBAAgB,IAAI,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,YAAY,EAAE;AACjE,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE;AACxC,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,EAAE;AAChD,oBAAoB,CAAC,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI;AAC5I,YAAY,CAAC;AACb;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK;AACtC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE;AAC9D,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb;AACA,YAAY,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC3C,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AAC3C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CA
AC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,KAAK;AACjB,gBAAgB,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;AACtD,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,EAAE;AACrD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI;AAC5F;AACA,YAAY,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE;AAClC;AACA,YAAY,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC;AAC5D,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,gBAAgB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,YAAY,GAAG;AACf;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE;AACnD,YAAY,GAAG;AACf;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,GAAG,SAAS,EAAE,IAAI,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG;AAClE;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,gBAAgB,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC;AACxF,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG;AACzC,gBAAgB,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,eAAe,CAAC;AACnE,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE;AACd;AACA,YAAY,QAAQ,CAAC,KAAK,EAAE;AAC5B,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC;AACxC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,GAAG,EAAE,CAAC,EAAE,EAAE;AAClD,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE;AAC1D,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,cAAc,CAAC;AAC1C,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,WAAW,EAAE;AACzC;AACA,YAAY,QAAQ;AACpB,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,EAAE;AACpD,oBAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW;AAC1E,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG;AAC7C,oBAAoB,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACrE,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,cAAc,CAAC;AAC1C,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,WAAW,CAAC;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,WAAW,EAAE;AACzC;AACA,YAAY,QAAQ,CAAC,IAAI,GAAG,MAAM,GAAG;AACrC;AACA,YAAY,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI;AAC/B,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAA
C,SAAS,GAAG,EAAE,CAAC,SAAS,EAAE;AAChE,gBAAgB,CAAC,IAAI,EAAE,KAAK,GAAG;AAC/B,YAAY,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AAC1E,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;AACjC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,GAAG,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;AACzC,gBAAgB,CAAC,KAAK,EAAE,OAAO,CAAC,MAAM,GAAG,GAAG,EAAE;AAC9C,gBAAgB,CAAC,IAAI,CAAC,SAAS,EAAE;AACjC;AACA,YAAY,SAAS;AACrB,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AACzF,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC5E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;AACnC,gBAAgB,SAAS;AACzB,oBAAoB,CAAC,IAAI,EAAE,KAAK,GAAG;AACnC;AACA,gBAAgB,EAAE,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC;AAC1E,gBAAgB,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC;AAC7D,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,GAAG;AACnD,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClC,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,gBAAgB,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE;AAC5C,YAAY,CAAC;AACb;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AAC3C,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE;AAChD,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG;AAC7D;AACA,oBAAoB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACvE,oBAAoB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC1C,oBAAoB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAChD;AACA,oBAAoB,UAAU,GAAG;AACjC,gBAAgB,GAAG;AACnB;AACA,YAAY,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3D,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AACvC,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;AACtC;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACnE,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACtC,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C;AACA,gBAAgB,UAAU,GAAG;AAC7B,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,QAAQ,CA
AC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzC,gBAAgB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACvC;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,KAAK,CAAC,eAAe,GAAG;AACxC,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AAC1E;AACA,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAChD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,wBAAwB,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE;AAClE,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;AAC9D,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,MAAM,CAAC;AACjE,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AACpF,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AAChI,wBAAwB,OAAO,CAAC,IAAI,EAAE;AACtC,4BAA4B,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AAChE,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC;AACnE,wBAAwB,GAAG;AAC3B,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC;AAC3F,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE;AACjE,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,GAAG;AACzG,oBAAoB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC;AACxD,oBAAoB,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,SAAS,EAAE;AAC3H,oBAAoB,EAAE,CAAC,CAAC,gBAAgB,CAAC,GAAG,CAAC,IAAI,CAAC;AAClD,wBAAwB,OAAO,CAAC,gBAAgB,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnE,gBAAgB,CAAC;AACjB;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,UAAU,EAAE,CAAC,UAAU,EAAE;AAC/F,gBAAgB,gBAAgB,CAAC,OAAO;AACxC,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC;AAC1B,oBAAoB,CAAC;AACrB,wBAAwB,KAAK,CAAC,CAAC,MAAM,CAAC;AACtC,wBAAwB,MAAM,CAAC,CAAC,OAAO;AACvC,oBAAoB,CAAC;AACrB,gBAAgB,IAAI;AACpB;AACA,gBAAgB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACjE,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,gBAAgB,KAAK,CAAC,eAAe,GAAG;AACxC,YAAY,
GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACrD,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACtC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;AAC1C;AACA,oBAAoB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C;AACA,oBAAoB,SAAS;AAC7B,wBAAwB,CAAC,IAAI,EAAE,KAAK,GAAG;AACvC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1C,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,mBAAmB,CAAC,SAAS,GAAG;AAC/D;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACjE,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AACpC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AACpC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AAClC,QAAQ,EAAE;AACV,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1B,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5B,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,SAAS;AAChB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,IAAI,CAAC;AAC/B,IAAI,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE;AACzD,IAAI,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAClC,QAAQ,EAAE,CAAC,EAAE,eAAe,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG;AAC1D,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC/B,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YAAY,CAAC;AACb,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;AAC9C,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACrC,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YAAY,CAAC;AACb,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,GAAG,EAAE;AACrD;AACA,YAAY,EAAE,IAAI,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK;AAClH,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;AAC5C,gBAAgB,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC;AAClK;AACA,gBAAgB,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC;AACzC,gBAAgB,MAAM,CAAC,IAAI,CAAC;AAC5B,YA
AY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AACtC;AACA,YAAY,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACvE,gBAAgB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3F,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,IAAI,CAAC;AACxB,QAAQ,EAAE;AACV,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACzF,QAAQ,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACrG,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAA
C,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,gBAAgB,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AACzC,gBAAgB,KAAK,CAAC,UAAU,CAAC,KAAK,EAAE;AACxC,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAClD,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC1E,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE;AAC1C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC7oBF,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK;AAC3E,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACpC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS;AACnB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE;AAChC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM;AACnJ,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,EAAE;AACzC,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE;AAC
zJ,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;AACnC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;AACf,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS;AAC/D,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACvD,oBAAoB,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO;AACpF,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS;AAChH,gBAAgB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACvE,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACxF,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,CAAC,EAAE;AAC/D,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,IAAI;AACnH;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM;AAC9E,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG;AAC5H,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACzD;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK;AAC5C,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,GAAG,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACnD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG,IAAI,EAAE,IAAI,GAAG;AACpF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,E
AAE,CAAC,WAAW,GAAG;AAClG,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAC1D,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,IAAI,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK;AAC7G,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AAC9E,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7C,YAAY,MAAM,CAAC,IAAI,EAAE;AACzB,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE;AACzE,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5C,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG;AAClE,YAAY,MAAM;AAClB,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,MAAM,EAAE;AACpE,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,EAAE;AAC5C;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACnD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACvD,YAAY,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACpD,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACnG,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI;AACnG,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC
,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACvC,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AAC7D,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACxC,wBAAwB,OAAO,CAAC,CAAC,OAAO;AACxC,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,GAAG;AACnB;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACjE;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC7B,gBAAgB,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE;AACxC,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AAClD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1E,oBAAoB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;AAC3E,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtG;AACA,gBAAgB,CAAC;AACjB,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,IAAI,CAAC,SAAS,EAAE,IAAI,GAAG,MAAM,GAAG;AAChD,YAAY,CAAC;AACb;AACA,YAAY,IAAI;AAChB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC9G,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9E,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChF,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,SAAS,CAAC;AACzC,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;AACvE,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE;AACjE,YAAY,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,IAAI,EAAE;AAClE,gBAAgB,GAAG,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE;AACnF,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC
,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAClE,wBAAwB,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gCAAgC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAClF,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG;AACzC;AACA,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AACjE,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACtE,gBAAgB,GAAG;AACnB;AACA;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,SAAS,GAAG;AACvD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC9E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,G
AAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC5E,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC7PF;AACA,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,EAAE;AAC7C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AAC9B,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,EAAE;AACrC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AACrB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,GAAG,SAAS,EAAE;AAC5D,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC;AACT;AACA,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE;AACzB,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC;AAC1B,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AAC7C,IAAI,CAAC;AACL,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACrD,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,GAAG;AACtC,IAAI,CAAC;AACL;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7B,QAAQ,CA
AC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,WAAW,EAAE;AACxC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,QAAQ,CAAC,YAAY,GAAG;AACxC,gBAAgB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AACtE,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,GAAG;AACrC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,GAAG,KAAK,CAAC,IAAI,EAAE;AACjD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,mBAAmB,GAAG,IAAI,EAAE,IAAI,GAAG;AAC5F,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,mBAAmB,GAAG,MAAM,EAAE,CAAC,GAAG;AACnH,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAClD,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AAC5D,gBAAgB,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AACzD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AACvD;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACrF;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CA
AC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,EAAE;AAC7C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,WAAW;AACvB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,EAAE;AACzC;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,EAAE;AACnD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI;AACxE;AACA,YAAY,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE;AACpD;AACA;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,GAAG;AAClE,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACtE,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvE,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;AACnC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtE;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,WAAW,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK;AAC3I,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE;AACtD;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,SAAS,EAAE,CAAC,GAAG;AACtE,gBAAgB,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AACpC,oBAAoB,MAAM;AAC1B,wBAAwB,CAAC,SAAS,EAAE,IAAI,EAAE;AAC1C,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvH,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;AACnC,oBAAoB,MAAM;AAC1B,wBAAwB,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE;AAChD,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CA
AC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9E,wBAAwB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AAClF,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACjC,oBAAoB,CAAC,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE;AAC7C,wBAAwB,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,SAAS,GAAG;AAC1E,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE;AACtD,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,IAAI;AACxB,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,EAAE,GAAG,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC;AAChF,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACjC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACjC,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG;AAC7D,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,WAAW,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,QAAQ,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACrC,YAAY,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACvC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK;AAC5B,QAAQ,EAAE;AACV,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,WAAW,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACtE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,WAAW,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,QAAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC;AACpC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AACpF,QAAQ,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACpG,QAAQ,YAAY,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AAClG,QAAQ,UAAU,CAAC
,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,GAAG,CAAC,IAAI;AAC9F,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,WAAW,CAAC,QAAQ,CAAC,QAAQ,EAAE;AAC3C,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,WAAW,CAAC,KAAK,CAAC,KAAK,EAAE;AACrC,KAAK,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AACzB,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAClD,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE;AAChD,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,CAAC;AC1RD;AACA,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,WAAW,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI;AAC3E,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,QAAQ;AAC5D,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC1E,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC
,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE;AAC7C,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC;AACf,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE;AACnH,gBAAgB,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChD,gBAAgB,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B;AACA,YAAY,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC;AACrC;AACA,YAAY,8DAA8D;AAC1E;AACA;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG,IAAI,EAAE,IAAI,GAAG;AAC7E,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,YAAY,GAAG;AAC3F,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACvF;AACA,YAAY,8DAA8D;AAC1E;AACA;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACnD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AAC9E;AACA,YAAY,QAAQ,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG;AACzC,YAAY,QAAQ;AACpB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACjF,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACvE;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAChE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;AACtD,YAAY,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAChF,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAChF,YAAY,WAAW,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;AACjG,gBAAgB,EAAE,CAAC,CAAC,UAAU,EAAE;AAChC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC/E,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,
CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC/E,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,IAAI;AAChB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvG,gBAAgB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACrC,gBAAgB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,YAAY,WAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;AAChD,gBAAgB,EAAE,CAAC,CAAC,UAAU,EAAE;AAChC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC/E,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC/E;AACA;AACA,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG;AAClC;AACA,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,SAAS,EAAE,YAAY,CAAC,SAAS,GAAG;AACxD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B;AACA,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7C,QAAQ,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC/E,QAAQ,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACjF,QAAQ,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAClF,QAAQ,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAChF,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5C,QAAQ,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAClB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3C,QAAQ,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACjB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3C,QAAQ,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACjB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC;AAC9C,QAAQ,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE;AAChC,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5C,QAAQ,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAClB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC
,CAAC;AAC/B,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5C,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrC,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAC/C,QAAQ,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,QAAQ,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACpC,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,CAAC;AC/JD,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;AACrC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAChC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACzD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK;AAC3B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC5B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,SAAS,EAAE;AACjE,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC;AACT;AACA,IAAI,OAAO,CAAC,WAAW,CAAC,KAAK,EAAE;AAC/B,IAAI,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG;AACvD;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,OAAO,EAAE;AACpC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,SAAS,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE;AAC5C,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AAC1E,gBAAgB,CAAC;AACjB,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CA
AC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,IAAI,EAAE,IAAI,GAAG;AACtE,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG;AAC7F,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,MAAM,EAAE,IAAI,GAAG;AAC7E,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,eAAe,GAAG;AACnE,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC9D;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AAC3C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD,gBAAgB;AAChB,YAAY,OAAO;AACnB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxE;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,EAAE;AACzD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI;AACzE;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,WAAW,EAAE,IAAI,CAAC,OAAO,EAAE;AACrD,YAAY;AACZ,YAAY,EAAE,CAAC,KAAK,CAAC,KAAK;AAC1B,YAAY,KAAK;AACjB,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,oBAAoB,OAAO,CAAC,YAAY,EAAE;AAC1C,gBAAgB,GAAG;AACnB;AACA,YAAY,KAAK,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AAC9C,gBAAgB,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACpC,oBAAoB,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,IAAI;AAClF,gBAAgB,CAAC;AACjB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE;AACvD;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,eAAe,GAAG,SAAS,EAAE,CAAC,EAAE;AACxE,gBAAgB,CAAC,IAAI,EAAE,WAAW,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK;AACvD,IAAI;AACJ,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE;AAC9C,gBAAgB,CAAC,MAAM,EAAE,CAAC,GAAG;AAC7B;AACA,YAAY,YAAY,CAAC,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,YAAY,CAAC,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,I
AAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE;AACnD,gBAAgB,CAAC,IAAI,CAAC,KAAK,EAAE;AAC7B,YAAY,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD,YAAY,MAAM,CAAC,SAAS,GAAG,MAAM,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,UAAU,EAAE;AAC7E;AACA,YAAY,OAAO,CAAC,IAAI,EAAE;AAC1B;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AAC3C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD,EAAE;AACF,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC5E,gBAAgB,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACxD,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACxD,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb,YAAY;AACZ,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACrC,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AACxE;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,SAAS;AACxB,YAAY,8DAA8D;AAC1E,IAAI;AACJ,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE;AAC7E,YAAY,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AACpC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,
CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,YAAY,CAAC;AACb,IAAI;AACJ,IAAI;AACJ,YAAY,QAAQ,CAAC,aAAa,EAAE,CAAC,CAAC;AACtC,gBAAgB,EAAE,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE;AAC9D,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,IAAI,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE;AACrE,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,wBAAwB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;AAC/D,4BAA4B,UAAU,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAClE,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,EAAE;AACvD,4BAA4B,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE;AAC3E,IAAI;AACJ,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,EAAE;AACxD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC/C,4BAA4B,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,EAAE;AAC5E,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA;AACA,YAAY,QAAQ,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;AAC9C,gBAAgB,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACpE,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACzE,gBAAgB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG;AAC/D,gBAAgB,aAAa,GAAG;AAChC,gBAAgB,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACrC,oBAAoB,QAAQ,CAAC,OAAO,CAAC,MAAM,EAAE;AAC7C,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,KAAK,CAAC,SAAS,GAAG;AACjD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AA
CjG;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE;AACjC,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,WAAW,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvF,YAAY,OAAO,CAAC,WAAW,CAAC,CAAC,EAAE;AACnC,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC9D,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC5C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACrSF,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC3C,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE;AAC7C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,IAAI,GAAG,GAAG;AAC7C,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AAC5C,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,EAAE,CAAC,KAAK,CAAC,QAAQ,
CAAC,KAAK,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC;AACpE,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AACvB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACpB,QAAQ,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AAC1E,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;AAC1D,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;AAC1D,QAAQ,CAAC;AACT;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,UAAU,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACtC,UAAU,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACtC;AACA,UAAU,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACjF,cAAc,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACpF;AACA,UAAU,SAAS;AACnB,kBAAkB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAChD,kBAAkB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACnD;AACA,UAAU,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAChE,UAAU,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC;AACpD,cAAc,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AAC/C,cAAc,MAAM,CAAC,KAAK,CAAC;AAC3B,UAAU,CAAC,CAAC,IAAI,CAAC,CAAC;AAClB,cAAc,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AACzD,UAAU,CAAC;AACX,UAAU,SAAS,CAAC,SAAS,MAAM,MAAM,GAAG;AAC5C;AACA,UAAU,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AACrD,UAAU,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,GAAG;AACvC,UAAU,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7C,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE;AACzC,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACxC,cAAc,YAAY,CAAC,GAAG,CAAC,GAAG,EAAE;AACpC,YAAY,GAAG;AACf,UAAU,GAAG;AACb;AACA,UAAU,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE;AACvC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;AAClC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;AAClC,gBAAgB,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC,eAAe,EAAE;AACxD,gBAAgB,CAAC,YAAY,CAAC,YAAY,CAAC;AAC3C,gBAAgB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACnC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,CAAC;AACvC,gBAAgB,CAAC,MAAM,CAAC,MAAM,CAAC;AAC/B,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC;AACjC,gBAAgB,CAAC,KAAK,CAAC,KAAK,CAAC;AAC7B,gBAAgB,CAAC,KAAK,CAAC,KAAK,CAAC;AAC7B,gBAAgB,CAAC,KAAK,GAAG;AACzB;AACA,UAAU,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,GAAG,IAAI,EAAE;AACjD,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;AACjC,gBAAgB,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE;AAC/C,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG;AACnF;AACA,UAAU,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,GAAG,IAAI,EAAE;A
ACjD,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;AACjC,gBAAgB,CAAC,KAAK,EAAE;AACxB,gBAAgB,CAAC,MAAM,EAAE,CAAC,EAAE;AAC5B,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE;AAC/C,gBAAgB,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE;AAClC;AACA,UAAU,IAAI;AACd,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE;AAC7B,YAAY,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC;AAC9B,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,YAAY,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,cAAc,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AAChG,kBAAkB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE;AACtC,cAAc,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AAChG,kBAAkB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE;AACtC;AACA,cAAc,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,EAAE;AACvC,cAAc,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACzC,cAAc,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAC9B,cAAc,YAAY,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACpD,gBAAgB,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE;AACjC,kBAAkB,KAAK,CAAC,CAAC,SAAS,CAAC;AACnC,kBAAkB,GAAG,CAAC,GAAG,KAAK,CAAC;AAC/B,kBAAkB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AACnC,gBAAgB,GAAG;AACnB,cAAc,GAAG;AACjB,cAAc,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AAC9C,YAAY,EAAE;AACd,YAAY,CAAC,EAAE,EAAE,QAAQ,EAAE,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,cAAc,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,YAAY,GAAG;AACf;AACA,UAAU,OAAO,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,KAAK;AAChE;AACA,UAAU,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC;AAC/D,UAAU,UAAU,CAAC,IAAI,EAAE;AAC3B,UAAU,UAAU,CAAC,IAAI,EAAE;AAC3B;AACA,UAAU,KAAK,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,cAAc,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,kBAAkB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,kBAAkB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,kBAAkB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AAClE;AACA,cAAc,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,gBAAgB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AAC7D,cAAc,GAAG;AACjB,YAAY,GAAG;AACf,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF;AACA,QAAQ,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,UAAU;AACnD,QAAQ,YAAY,EAAE,G
AAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AAChG,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACpF,QAAQ,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACtF,QAAQ,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAClF,QAAQ,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACpF;AACA,QAAQ,EAAE,OAAO,CAAC,OAAO;AACzB,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AAClF,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AAClF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC3E,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,QAAQ,GAAG;AACX,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC3E,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC5LF,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,C
AAC,EAAE;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAClD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,EAAE;AACrC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO;AAC3D,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO;AACvG,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC;AAChH,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;AACtJ,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,WAAW,EAAE;AACrH,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC;AAChE,QAAQ,CAAC;AACT;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,IAAI,EAAE,IAAI,GAAG;AACvE,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG;AAC9F,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,EAAE;AAClD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC;AACnD;AACA,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AAC/D,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE;AACnF;AACA,YAAY,GAAG,CAAC,WAAW,CAAC;AAC5B;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACnC,gBAAgB,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE;AAC5C,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAClC;AACA,gBAAgB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG;AACtD,YAAY,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAC3C,gBAAgB,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE;AAC1C,oBAAoB,CAAC,KAAK,EAA
E,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE;AACnC;AACA,gBAAgB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AACpD;AACA,gBAAgB,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE;AACvC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE;AAClD,oBAAoB,CAAC,QAAQ,EAAE,SAAS,IAAI,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,IAAI,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,IAAI,IAAI,GAAG;AACnN,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI;AACtE;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,GAAG,GAAG;AACpE;AACA,gBAAgB,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE;AACrD,wBAAwB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG;AAC3D,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb;AACA,YAAY,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE;AAC7C,gBAAgB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC/C,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG;AAClE;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO;AACpG,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9C,oBAAoB,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ;AACvE,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AAC7C,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,IAAI,CAAC,GAAG,OAAO,EAAE,CAAC,CAAC;AAC9C,4BAA4B,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClD,gCAAgC,EAAE,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC;AAClF,gCAAgC,EAAE,EAAE,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC;AAClE,gCAAgC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,GAAG;AACzF,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnD,4BAA4B,CAAC;AAC7B,4BAA4B,IAAI,CAAC,CAAC;AAClC,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzD,gCAAgC,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC,CAAC;AAC7F,oCAAoC,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM;AACrG,oCAAoC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AAC7E,oCAAoC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,GAAG;AAC9F,gCAAgC,CAAC;AACjC,4BAA4B,C
AAC;AAC7B,wBAAwB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACtD,4BAA4B,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1C,gCAAgC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AAC7D,gCAAgC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;AAC7G,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC;AAC5E,4BAA4B,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AACnD,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzD,gCAAgC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC5D,gCAAgC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,GAAG;AACjG,gCAAgC,EAAE,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AACrG,oCAAoC,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM;AACrG,oCAAoC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AAC7E,oCAAoC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACnE,wCAAwC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AACtF,oCAAoC,GAAG;AACvC,gCAAgC,CAAC;AACjC,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,wBAAwB,QAAQ,CAAC,WAAW,EAAE;AAC9C,4BAA4B,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AACpF,4BAA4B,UAAU,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE;AACvF,wBAAwB,GAAG;AAC3B;AACA,oBAAoB,CAAC;AACrB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AAC7D,oBAAoB,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ;AAC3E,wBAAwB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AACjD,wBAAwB,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AACzF,wBAAwB,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC;AAC5G,wBAAwB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACvD,4BAA4B,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACnD,4BAA4B,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AACxF,wBAAwB,GAAG;AAC3B,wBAAwB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3C,wBAAwB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1E,wBAAwB,QAAQ,CAAC,WAAW,EAAE;AAC9C,4BAA4B,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,EAAE;AACnF,wBAAwB,GAAG;AAC3B,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB;AACA,YAAY,MAAM,CAAC,OAAO,EAAE,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG;AACjF,YAAY,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACnC;AACA,YAAY,UAAU;AACtB,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,YAAY,CAAC;AAC3C,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG;AACvE;AACA,YAAY,EAAE,IAAI,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,KA
AK,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC;AACtH,YAAY,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE;AAChD;AACA,YAAY,GAAG,CAAC,WAAW,CAAC;AAC5B,YAAY,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChC,oBAAoB,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AACrC,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChC,oBAAoB,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AACrC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACxB;AACA,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,gBAAgB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,GAAG,CAAC,UAAU,CAAC;AACnC,oBAAoB,EAAE,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;AACnG,wBAAwB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE;AAC5F,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,OAAO;AAC7F,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,IAAI;AAC1F,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG;AACpE,oBAAoB,CAAC;AACrB,oBAAoB,GAAG,CAAC,cAAc,CAAC;AACvC,oBAAoB,GAAG,CAAC,CAAC;AACzB,wBAAwB,cAAc,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,GAAG,qBAAqB,GAAG;AACnF,wBAAwB,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,OAAO;AAClI,wBAAwB,EAAE,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG;AAC9D,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,wBAAwB,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,mBAAmB,CAAC,UAAU,EAAE;AAClF,oBAAoB,CAAC;AACrB;AACA,oBAAoB,YAAY,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,OAAO,EAAE;AAChE,gBAAgB,GAAG;AACnB;AACA,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC;AACA,gBAAgB,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7F,oBAAoB,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,YAAY,EAAE;AAC5E,oBAAoB,WAAW,CAAC,EAAE,CAAC,YAAY,CAAC,YAAY,IAAI;AAChE,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG;AACzF;AACA,gBAAgB,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5E,oBAAoB,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,oBAAoB,YAAY,GAAG;AACnC;AACA,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACnE,wBAAwB,EAAE,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACrF,4BAA4B,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE;AAC7E,oBAAoB,CAAC;AACrB;AACA,oBAAoB,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACzF,wBAAwB,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1C,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG;AACpC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAA
C,GAAG,CAAC,CAAC;AAClE,oBAAoB,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACzC,oBAAoB,IAAI,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,EAAE;AAC5C,gBAAgB,CAAC;AACjB;AACA,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI;AAC1I,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACjF,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACjC,oBAAoB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACtH,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,CAAC;AACtB,oBAAoB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAChF,gBAAgB,CAAC;AACjB;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE;AACpH;AACA,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB;AACA,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,oBAAoB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAoB,IAAI,CAAC;AACzB,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,GAAG,qBAAqB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;AAC7G,wBAAwB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC;AACvC;AACA,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACjF,4BAA4B,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,4BAA4B,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC;AAChD,wBAAwB,CAAC;AACzB;AACA,wBAAwB,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC;AAC1C,wBAAwB,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;AACnE;AACA,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;AACtE,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACjF,gBAAgB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC/G;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;AAChE,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACnC,gBAAgB,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM;AACvD,gBAAgB,WAAW;AAC3B,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,qBAAqB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7E,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;AACvC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;AACnC,YAAY,CAAC;AACb;AACA,YAAY,WAAW;AACvB,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,UAAU,CAAC;AAC1C,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM
,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAClF,QAAQ,GAAG;AACX;AACA,QAAQ,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE;AAChD,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1B,gBAAgB,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE;AAC1D,YAAY,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AACnC,gBAAgB,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE;AAC1D,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,YAAY,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAC/C,gBAAgB,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1D,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1D,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACvF,QAAQ,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACvF,QAAQ,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACnG,QAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACrF,QAAQ,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC/F,QAAQ,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACnG,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACzF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,eAAe,EAAE,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,IAAI;AACzG,QAAQ,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AAC3F,QAAQ,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AACnF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CA
AC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC5VF,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI;AAC3E,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK;AACzB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACzB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE;AACzJ,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC5B,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;AACxD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,WAAW,CAAC,KAAK,GAAG;AAChC;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG;AACrF;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC;AACxB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,
CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG;AAC1J,YAAY,IAAI;AAChB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG;AACvD;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AACpF,gBAAgB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACvD;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK;AAC3H,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE,MAAM,GAAG;AACtG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACxD,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,SAAS;AACrB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,QAAQ,CAAC,UAAU,EAAE;AACzC,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACtC,wBAAwB,EAAE,CAAC,CAAC,EAAE;AAC9B,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACvD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AAC9D,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACvF;AACA,YAAY,GA
AG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,GAAG,SAAS,GAAG,EAAE,CAAC,GAAG,EAAE;AACnE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI;AAClF,YAAY,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACjC;AACA,YAAY,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE;AACnG,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAC3G,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE;AAClJ,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC;AAC7C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC;AAC7C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC;AAC7C,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC;AAC7C,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACvC,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AAC7D,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACxC,wBAAwB,OAAO,CAAC,CAAC,OAAO;AACxC,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC;AAC7C,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,GAAG;AACnB;AACA,YAAY,IAAI;AAChB,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACpE,gBAAgB,CAAC,IAAI,EAA
E,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7I,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE;AACrD,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE;AAClJ,gBAAgB,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACjG,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/E;AACA,YAAY,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE;AACrD,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG;AACrC,oBAAoB,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,EAAE;AACpD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG;AACxH;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,SAAS,GAAG;AACzD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,EAAE,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC5E,IAAI,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC9D,QAAQ,SAAS;AACjB,YAAY,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC;AACvD,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,WAAW,CAAC;AAC1C,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACxC,QAAQ,SAAS;AACjB,YAAY,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAC7C,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,CAAC;AACpC,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC9E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CA
AC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC5E,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACzOF;AACA,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;AACpD,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,aAAa,EAAE;AACrD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,oBAAoB,EAAE;AACtD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC;AACT;AACA;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CA
AC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,KAAK;AACzC,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AACpB,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AAC1F,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,GAAG;AAClC,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC1C,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACxD,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;AACxD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,WAAW,CAAC,KAAK,GAAG;AAChC,YAAY,WAAW,CAAC,MAAM,CAAC,IAAI,EAAE;AACrC,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACrD,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACrD;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,kBAAkB,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;AAC1G,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,EAAE,GAAG,CAAC,KAAK,CAAC,QAAQ;AAChC,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACjE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,
GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG;AAC9B,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG;AAC9B;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG,IAAI,EAAE,IAAI,GAAG;AAC3F,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG,MAAM,EAAE,CAAC,GAAG;AAClH,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,EAAE;AAC7C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK;AACtC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/D,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb,YAAY,IAAI;AAChB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,IAAI;AACvE;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,EAAE;AACn
D,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI;AACxE,YAAY,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE;AAC7C;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC5E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,IAAI,CAAC,eAAe,GAAG;AACvC;AACA,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AAC1E,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAChD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,wBAAwB,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,IAAI,EAAE;AAC7D,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;AAC9D,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AACxD,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,wBAAwB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AACvH,wBAAwB,OAAO,CAAC,IAAI,EAAE;AACtC,4BAA4B,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AAChE,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;AACpE,4BAA4B,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC;AAC3D,wBAAwB,GAAG;AAC3B,oBAAoB,GAAG;AACvB;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,UAAU,GAAG;AACnF,gBAAgB,gBAAgB,CAAC,OAAO;AACxC,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE;AAC3B,wBAAwB,KAAK,CAAC,CAAC,MAAM,CAAC;AACtC,wBAAwB,KAAK,CAAC,CAAC,UAAU,CAAC;AAC1C,wBAAwB,MAAM,CAAC,CAAC,OAAO;AACvC,oBAAoB,KAAK;AACzB;AACA,gBAAgB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACjE;AACA,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,gBAAgB,QAAQ,CAAC,WAAW,GAAG;AACvC,gBAAgB,IAAI,CAAC,eAAe,GAAG;AACvC,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,
CAAC,CAAC;AAC7D,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzC;AACA,gBAAgB,EAAE,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAC9E,oBAAoB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,wBAAwB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3C,wBAAwB,IAAI,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,OAAO,EAAE,QAAQ,EAAE,CAAC,KAAK,EAAE;AAChF,wBAAwB,MAAM,CAAC,CAAC,CAAC;AACjC,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC/E,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C;AACA,gBAAgB,SAAS,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,EAAE;AACnD,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,cAAc,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,gBAAgB,EAAE,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC;AAC/F,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,gBAAgB,GAAG;AACnB,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC/E,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,kBAAkB,CAAC,SAAS,GAAG;AAC9D,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACrC,YAAY,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACvC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK;AAC5B,QAAQ,EAAE;AACV,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/D,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACx
C,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACxF,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,YAAY,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE;AAC9B,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,kBAAkB,CAAC,CAAC,CAAC;AACjC,YAAY,WAAW,CAAC,KAAK,CAAC,kBAAkB,EAAE;AAClD,YAAY,KAAK,CAAC,QAAQ,CAAC,kBAAkB,EAAE;AAC/C,YAAY,KAAK,CAAC,QAAQ,CAAC,kBAAkB,EAAE;AAC/C,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAClD,QAAQ,GAAG;AACX,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7B,gBAAgB,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AACzC,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE;AACzC,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AACF;AACA;AACA,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM;AACtE,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,CAAC,MAAM,CA
AC,OAAO,IAAI;AAClE;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AAClE,IAAI,KAAK,CAAC,uBAAuB,CAAC,IAAI,EAAE;AACxC,IAAI,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACpE,QAAQ,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC1D,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC;AACpC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG;AACpD,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAC3D,QAAQ,MAAM,CAAC,EAAE,CAAC,CAAC;AACnB,YAAY,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;AACzE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;AACvB,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,IAAI,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACxF,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AAC1F,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACvF,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACtF,YAAY,GAAG,KAAK,GAAG;AACvB,IAAI,GAAG;AACP,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AACF;AACA,EAAE,CAAC,gBAAgB,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM;AACpF,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC5C,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,CAAC,MAAM,CAAC,cAAc,IAAI;AACzE;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AAClE,IAAI,KAAK,CAAC,uBAAuB,CAAC,IAAI,EAAE;AACxC,IAAI,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACpE,QAAQ,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC1D,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC;AACpC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG;AACpD,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAC3D,QAAQ,MAAM,CAAC,EAAE,CAAC,CAAC;AACnB,YAAY,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;AACzE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;AACvB,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,IAAI,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACxF,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AAC1F,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,IAAI,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACvF,YAAY,EAAE,EAAE,EAAE,EAAE,C
AAC,GAAG,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC;AACtF,YAAY,GAAG,KAAK,GAAG;AACvB,IAAI,GAAG;AACP,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACnZF,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC/B,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAClD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,EAAE;AACrC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO;AAC3D,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO;AACvG,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC;AAChH,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;AACtJ,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,WAAW,EAAE;AACrH,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC;AAChE,QAAQ,CAAC;AACT;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,IAAI,EAAE,IAAI,GAAG;AACvE,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG;AAC9F,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC;AAC3B,gBAAgB,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACjG,YAAY,IAAI;AAChB,gBAAgB,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC5F;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,EAAE;AAClD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAA
E,CAAC,MAAM,CAAC,CAAC,CAAC;AACnD;AACA,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AAC/D,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AACpF,YAAY,GAAG,CAAC,WAAW,CAAC;AAC5B;AACA,YAAY,GAAG,CAAC,WAAW,CAAC;AAC5B,YAAY,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChC,oBAAoB,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AACrC,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChC,oBAAoB,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AACrC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACnC,gBAAgB,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE;AAC5C,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAClC;AACA,gBAAgB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG;AACjE,YAAY,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAC3C,gBAAgB,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE;AAC1C,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE;AACnC,gBAAgB,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG;AACjE;AACA,gBAAgB,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE;AACvC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE;AAClD,oBAAoB,CAAC,QAAQ,EAAE,SAAS,IAAI,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,IAAI,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,IAAI,IAAI,GAAG;AACnN,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI;AACtE;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,GAAG,GAAG;AACpE;AACA,gBAAgB,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE;AACrD,wBAAwB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG;AAC3D,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb;AACA,YAAY,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE;AAC7C,gBAAgB,CAAC,IAAI,EAAE,KAAK,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC/C,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG;AAClE;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO;AACpG,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9C,oBAAoB,EAAE,CAAC,IAAI,CAAC,IAA
I,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ;AACvE,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AAC7C,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,IAAI,CAAC,GAAG,OAAO,EAAE,CAAC,CAAC;AAC9C,4BAA4B,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClD,gCAAgC,EAAE,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC;AAClF,gCAAgC,EAAE,EAAE,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC;AAClE,gCAAgC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,GAAG;AACzF,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnD,4BAA4B,CAAC;AAC7B,4BAA4B,IAAI,CAAC,CAAC;AAClC,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzD,gCAAgC,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC,CAAC;AAC7F,oCAAoC,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM;AACrG,oCAAoC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AAC7E,oCAAoC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,GAAG;AAC9F,gCAAgC,CAAC;AACjC,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACtD,4BAA4B,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1C,gCAAgC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AAC7D,gCAAgC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;AAC7G,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC;AAC5E,4BAA4B,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AACnD,gCAAgC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzD,gCAAgC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC5D,gCAAgC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,GAAG;AACjG,gCAAgC,EAAE,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AACrG,oCAAoC,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM;AACrG,oCAAoC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AAC7E,oCAAoC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACnE,wCAAwC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AACtF,oCAAoC,GAAG;AACvC,gCAAgC,CAAC;AACjC,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,wBAAwB,QAAQ,CAAC,WAAW,EAAE;AAC9C,4BAA4B,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AACpF,4BAA4B,UAAU,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE;AACvF,wBAAwB,GAAG;AAC3B;AACA,oBAAoB,CAAC;AACrB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AAC7D,oBAAoB,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE;AACjD,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ;AAC3E,wBAAwB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AACjD,wBAAwB,EAAE,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AACz
F,wBAAwB,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC;AAC5G,wBAAwB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACvD,4BAA4B,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACnD,4BAA4B,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AACxF,wBAAwB,GAAG;AAC3B,wBAAwB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3C,wBAAwB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1E,wBAAwB,QAAQ,CAAC,WAAW,EAAE;AAC9C,4BAA4B,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,EAAE;AACnF,wBAAwB,GAAG;AAC3B,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB;AACA,YAAY,MAAM,CAAC,OAAO,EAAE,EAAE,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG;AACjF,YAAY,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACnC;AACA,YAAY,UAAU;AACtB,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,YAAY,CAAC;AAC3C,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG;AACvE;AACA,YAAY,EAAE,IAAI,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC;AACtH,YAAY,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE;AAChD,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACxB;AACA,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,gBAAgB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,GAAG,CAAC,UAAU,CAAC;AACnC,oBAAoB,EAAE,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;AACnG,wBAAwB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,YAAY,EAAE;AAC5F,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,OAAO;AAC7F,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,IAAI;AAC1F,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG;AACpE,oBAAoB,CAAC;AACrB,oBAAoB,GAAG,CAAC,cAAc,CAAC;AACvC,oBAAoB,GAAG,CAAC,CAAC;AACzB,wBAAwB,cAAc,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,GAAG,qBAAqB,GAAG;AACnF,wBAAwB,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,OAAO;AAClI,wBAAwB,EAAE,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,GAAG;AAC9D,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,wBAAwB,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,mBAAmB,CAAC,UAAU,EAAE;AAClF,oBAAoB,CAAC;AACrB;AACA,oBAAoB,YAAY,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,OAAO,EAAE;AAChE,gBAAgB,GAAG;AACnB;AACA,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,gBAAgB,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC;AACA,gBAAgB,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7F,oBAAoB,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,YAAY,EAAE;AAC5E,oBAAoB,WAAW,CAAC,EAAE,CAAC,YAAY,CAAC,YAAY,IAAI;AAChE,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG;AACz
F;AACA,gBAAgB,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5E,oBAAoB,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,oBAAoB,YAAY,GAAG;AACnC;AACA,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACnE,wBAAwB,EAAE,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACrF,4BAA4B,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE;AAC7E,oBAAoB,CAAC;AACrB;AACA,oBAAoB,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACzF,wBAAwB,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1C,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG;AACpC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAClE,oBAAoB,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACzC,oBAAoB,IAAI,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,EAAE;AAC5C,gBAAgB,CAAC;AACjB;AACA,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI;AAC1I,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACjF,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACjC,oBAAoB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACtH,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,CAAC;AACtB,oBAAoB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAChF,gBAAgB,CAAC;AACjB;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE;AACpH;AACA,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB;AACA,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,oBAAoB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAoB,IAAI,CAAC;AACzB,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,IAAI,GAAG,qBAAqB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;AAC7G,wBAAwB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC;AACvC;AACA,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACjF,4BAA4B,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,4BAA4B,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC;AAChD,wBAAwB,CAAC;AACzB;AACA,wBAAwB,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC;AAC1C,wBAAwB,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;AACnE;AACA,wBAAwB,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC3D,4BAA4B,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC1D,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;AACtE,oBAAoB,GAAG;AACvB;AACA,gBAAgB,EAAE,QAAQ,CAAC,MA
AM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACjF,gBAAgB,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC/G;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;AAChE,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AACnC,gBAAgB,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM;AACvD,gBAAgB,WAAW;AAC3B,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,qBAAqB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7E,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;AACvC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE;AACpC;AACA,gBAAgB,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;AAC9D,gBAAgB,MAAM,CAAC,MAAM,EAAE,IAAI,IAAI,KAAK,CAAC,KAAK,EAAE;AACpD,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,EAAE;AAClD,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,GAAG,EAAE;AACzC,oBAAoB,EAAE,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,GAAG,EAAE;AAC9C,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;AACvC;AACA,gBAAgB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG;AACzD;AACA,gBAAgB,QAAQ;AACxB,gBAAgB,CAAC,UAAU,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC3C,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AAClE,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;AAChD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;AAChD,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvD;AACA;AACA,YAAY,CAAC;AACb;AACA,YAAY,WAAW;AACvB,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,UAAU,CAAC;AAC1C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,YAAY,CAAC;AACpD,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,UAAU,EAAE;AAC7C,QAAQ,GAAG;AACX;AACA,QAAQ,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE;AAChD,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC1B,gBAAgB,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE;AACtD,YAAY,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC;AACnC,gBAAgB,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE;AACvD,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,YAAY,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAC/C,gBAAgB,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7C,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA;AACA,QAAQ,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,YAAY,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;AAC/C,gBAAgB,MAAM,CAAC,CAAC,CAAC;AACzB,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,C
AAC;AAC5C,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACvF,QAAQ,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACvF,QAAQ,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACnG,QAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACrF,QAAQ,YAAY,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACnG,QAAQ,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC/F,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACzF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,eAAe,EAAE,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,IAAI;AACzG,QAAQ,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AAC3F,QAAQ,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AACnF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC/XF;AACA,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC7B,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACtC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,
CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK;AAC5E,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK;AAC5F,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK;AAC5F,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO;AACjJ,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;AAC/F,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACvE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa;AACnE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,SAAS,EAAE;AACpG,QAAQ,CAAC;AACT;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI;AACtC,QAAQ,CAAC,WAAW,EAAE,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ;AAClH,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM;AAC9C,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC;AAChE,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,OAAO,EAAE;AACpC,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,C
AAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,EAAE,IAAI,GAAG;AAC7E,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,GAAG;AAC3F,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAC1D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,OAAO;AACnB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,EAAE;AACzC;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG;AAC7D,YAAY,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE;AACtC;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,GAAG;AAC3D,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AAClE,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7E;AACA,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AAC9F,YAAY,WAAW;AACvB,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AAC9F;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AAC9E,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE;AAC3F,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7C;AACA,YAAY,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACnC;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AACjE,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnE,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AACrE,YAAY,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE;AAC/D,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,C
AAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAClF;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;AAC5D,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI;AACvJ,YAAY,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AAC5C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,oBAAoB,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACxC,wBAAwB,CAAC,WAAW,CAAC,WAAW,CAAC;AACjD,wBAAwB,CAAC,OAAO,CAAC,OAAO,CAAC;AACzC,wBAAwB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACtF,wBAAwB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvF,wBAAwB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvI,wBAAwB,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI;AACtH,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAChD,gBAAgB,GAAG;AACnB,YAAY,MAAM,CAAC,IAAI,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;AACnD,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE;AACrE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,oBAAoB,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACxC,wBAAwB,CAAC,WAAW,CAAC,WAAW,CAAC;AACjD,wBAAwB,CAAC,OAAO,CAAC,OAAO,CAAC;AACzC,wBAAwB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACrF,wBAAwB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACtF,wBAAwB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtI,wBAAwB,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI;AACtH,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAChD,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;AAC5D,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AACzD;AACA,YAAY,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AAC5C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CA
AC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE;AAC1B,oBAAoB,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACjC,oBAAoB,CAAC,WAAW,CAAC,WAAW,CAAC;AAC7C,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC;AACrC,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAClF,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAClF,YAAY,EAAE;AACd;AACA,YAAY,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE;AACrE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE;AAC1B,oBAAoB,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACjC,oBAAoB,CAAC,WAAW,CAAC,WAAW,CAAC;AAC7C,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC;AACrC,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACjF,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACjF,YAAY,EAAE;AACd;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,GAAG;AAChD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM;AAC1B,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AACrG,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AAC7G,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AAC3G;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,WAAW,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACnG,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,
MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE;AACnC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE;AACjC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC5C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACpOF,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAClC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAChC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,oBAAoB,EAAE;AACtD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG;AACnD,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAChC,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,KAAK;AACzC,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK;AAC7B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AACzG,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC;AACT;AACA,IAAI,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,
KAAK;AAChD,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC1C,IAAI,KAAK,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE;AACrC;AACA,IAAI,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,gBAAgB,CAAC,OAAO,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG;AACP;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrE,YAAY,EAAE;AACd,QAAQ,EAAE;AACV,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,EAAE;AACV,IAAI,EAAE;AACN;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AAClC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3H,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,SAAS,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE;AAC5C,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AAC1E,gBAAgB,CAAC;AACjB,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ;AAC5C,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG;AAC5E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG
,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACjE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACxG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACtD,YAAY,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3D,gBAAgB,OAAO,CAAC,MAAM,EAAE;AAChC,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AAC/B;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,GAAG,IAAI,EAAE,IAAI,GAAG;AAClF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,GAAG,MAAM,EAAE,CAAC,GAAG;AACzG,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC1E,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,MAAM,EAAE,IAAI,GAAG;AACjF,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AACnE;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AAChF;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,EAAE;AAC7C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AAClD,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,MAAM;AAClF,gBAAgB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AACtD,oBAAoB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACrD,wBAAwB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACnI,oBAAoB,CAAC;AACrB;AACA,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,wBAAwB,CAAC,IAAI,EAAE,SAAS,
EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAChF,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK;AACtC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/D,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AACrD,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxE;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,EAAE;AACrD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI;AACzE;AACA;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACtC,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,MAAM,CAAC,IAAI;AAC1B,YAAY,8DAA8D;AAC1E,YAAY,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACpC,cAAc,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACnD,kBAAkB,CAAC,UAAU,EAAE;AAC/B,kBAAkB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACrC,kBAAkB,CAAC,IAAI,CAAC,KAAK,CAAC;AAC9B,gBAAgB,CAAC;AACjB,cAAc,CAAC;AACf,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACpC,cAAc,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACnD,kBAAkB,CAAC,UAAU,EAAE;AAC/B,kBAAkB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACrC,kBAAkB,CAAC,IAAI,CAAC,KAAK,CAAC;AAC9B,gBAAgB,CAAC;AACjB,cAAc,CAAC;AACf,YAAY,CAAC;AACb;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AAC/C,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AAC3E;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK;AAC3B,YAAY,8DAA8D;AAC1E,YAAY,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC;AAC9B,gBAAgB,SAAS,CAAC,IAAI,CAAC,KAAK,
EAAE;AACtC,gBAAgB,WAAW,GAAG;AAC9B,gBAAgB,WAAW,GAAG;AAC9B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,KAAK,CAAC,KAAK,CAAC,cAAc,EAAE;AAC5C,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI;AACtH,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG;AAC5E,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG;AAC1F,gBAAgB,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE;AACpC,oBAAoB,OAAO,CAAC,MAAM,EAAE;AACpC,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,KAAK,CAAC,eAAe,GAAG;AACxC,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AAC1E,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC;AAC1E,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG;AACvI,wBAAwB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChF,4BAA4B,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AACpI,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AACpF,4BAA4B,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC;AACxD,gCAAgC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AAClG,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC;AACpC,gCAAgC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AAClG,4BAA4B,CAAC;AAC7B,wBAAwB,GAAG;AAC3B;AACA,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,aAAa,CAAC,UAAU,EAAE;AAC9D,wBAAwB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AACvE,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACnD,4BAA4B,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE;AACtE,wBAAwB,CAAC;AACzB,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AACx
D,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,wBAAwB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AACvH,wBAAwB,OAAO,CAAC,IAAI,EAAE;AACtC,4BAA4B,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,KAAK,CAAC,CAAC,WAAW,CAAC;AAC/C,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;AACpE,4BAA4B,IAAI,CAAC,CAAC,KAAK;AACvC,wBAAwB,GAAG;AAC3B,oBAAoB,GAAG;AACvB,gBAAgB,EAAE,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC;AAC3F,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE;AACjE,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,GAAG;AACzG,oBAAoB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC;AACxD,oBAAoB,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,MAAM,CAAC,SAAS,EAAE;AAC5H,oBAAoB,EAAE,CAAC,CAAC,gBAAgB,CAAC,GAAG,CAAC,IAAI,CAAC;AAClD,wBAAwB,OAAO,CAAC,gBAAgB,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnE,gBAAgB,CAAC;AACjB;AACA,gBAAgB,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3D,oBAAoB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE;AACrE,gBAAgB,EAAE;AAClB;AACA,gBAAgB,gBAAgB,CAAC,OAAO;AACxC,oBAAoB,CAAC,cAAc,CAAC,gBAAgB,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,qBAAqB,CAAC;AACvG,oBAAoB,CAAC,IAAI,EAAE;AAC3B,wBAAwB,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,UAAU,CAAC,EAAE;AACnE,wBAAwB,KAAK,CAAC,CAAC,UAAU,CAAC;AAC1C,wBAAwB,MAAM,CAAC,CAAC,OAAO;AACvC,oBAAoB,KAAK;AACzB;AACA,gBAAgB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACjE;AACA,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AACjD;AACA,gBAAgB,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAC5C,gBAAgB,GAAG,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,oBAAoB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,oBAAoB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;AAC1D,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,MAAM,CAAC;AAC7D,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AAC5H,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AAC3E,oBAAoB,OAAO,CAAC,IAAI,EAAE;AAClC,wBAAwB,KAAK,CAAC,CAAC,KAAK,CAAC;AACrC,wBAAwB,UAAU,CAAC,CAAC,UAAU,CAAC;AAC/C,wBAAwB,GAAG,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,IAAI,EAAE;AACpD,wBAAwB,WAAW,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC;AACxD,wBAAwB,MAAM,CAAC,CAAC,MAAM;AACtC,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,YAAY,CAAC,OAAO,EAAE;AACrD,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,gBAAgB,KAAK,CAAC,eAAe,GAAG;AACxC,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QA
AQ,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7F,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,SAAS;AACxB,YAAY,8DAA8D;AAC1E;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE;AAC7E,YAAY,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AACpC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC;AACtC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC;AACtC,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AACxE,oBAAoB,CAAC,KAAK,CAAC;AAC3B,oBAAoB,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpE,wBAAwB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,4BAA4B,MAAM,CAAC,CAAC;AACpC,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC3C,gCAAgC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAC7C,gCAAgC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACnD,gCAAgC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oCAAoC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AACtG,gCAAgC,GAAG;AACnC,gCAAgC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc;AAChE,4BAA4B,EAAE;AAC9B,wBAAwB,EAAE;AAC1B,gBAAgB,EAAE;AAClB,gBAAgB,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AAC3E;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AAC3C,gBAAgB,WAAW,GAAG;AAC9B,gBAAgB,WAAW,GAAG;AAC9B,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,SAAS,CAAC,SAAS,GAAG;AACrD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,
IAAI,CAAC;AACL;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACjE,QAAQ,EAAE,EAAE,GAAG,CAAC,MAAM,CAAC,cAAc,EAAE;AACvC,YAAY,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AAC5C,QAAQ,CAAC;AACT,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC9B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AACvG,QAAQ,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;AACzD,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,KAAK;AACvG,QAAQ,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,SAAS,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,KAAK;AAC/G,QAAQ,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,SAAS,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,KAAK;AAC/G,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,WAAW,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,WAAW,CAAC,CAAC,K
AAK;AAC7G;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC7E,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC;AACzF,YAAY,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC;AAC1F,YAAY,KAAK,CAAC,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC;AACxF,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,YAAY,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE;AAC/B,YAAY,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE;AAC/B,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,WAAW,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE;AACjC,YAAY,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE;AACjC,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;AAChC,YAAY,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE;AACjC,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;AAChC,YAAY,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE;AACjC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,C
AAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC9D,QAAQ,GAAG;AACX,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AACzC,gBAAgB,KAAK,CAAC,UAAU,CAAC,KAAK,EAAE;AACxC,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE;AAC1C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AACF;AACA,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC3C,EAAE,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE;AAC9B,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE;AAC3B,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,EAAE;AACzB,EAAE;ACxiBF,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAChC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,aAAa,EAAE;AAC1C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,aAAa,EAAE;AAC3C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACnC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAChC,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC7D,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC5B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK;AAChC,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI;AAC/B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;AAC1B,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC5B,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,EAAE;AACZ,QAAQ,CAAC,CAAC,EAAE;AACZ,QAAQ,CAAC,CAAC,EAAE;AACZ,QAAQ,CAAC,CAAC,EAAE;AACZ,QAAQ,CAAC,CAAC,EAAE;AACZ,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE;AACvE,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,E
AAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE;AAC7C,QAAQ,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE;AAC/C,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,KAAK;AAClC,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE;AACzB,IAAI,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE;AAC9B,IAAI,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;AAC5E,IAAI,MAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG;AACrD,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC1C,IAAI,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC1B,IAAI,MAAM,CAAC,MAAM,EAAE,KAAK,GAAG;AAC3B,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC3C,IAAI,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAC1B,IAAI,MAAM,CAAC,MAAM,EAAE,KAAK,GAAG;AAC3B;AACA,IAAI,OAAO,CAAC,aAAa,CAAC,IAAI,EAAE,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChE,QAAQ,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAClC,QAAQ,MAAM,CAAC,gBAAgB;AAC/B,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC;AAC7C,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC;AAC7C,IAAI,CAAC;AACL;AACA,IAAI,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACnC,QAAQ,MAAM,CAAC,gBAAgB;AAC/B,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC;AAC7C,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC;AAC7C,IAAI,CAAC;AACL;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE;AACpE,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,MAAM,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC1C,QAAQ,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC;AAC/B,MAAM,GAAG;AACT,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AACtF,oBAAoB,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,gBAAgB,gBAAgB,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC;AAC9E;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,kBAAkB,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AAC3G,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,
CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,gBAAgB;AACpD,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AACpF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG;AAChI;AACA,YAAY,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC;AACvD,gBAAgB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG;AAClC,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACnC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,GAAG;AAChC;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AAC9E,YAAY,EAAE,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACrE,YAAY,EAAE,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACrE;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI;AAC9B,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAClG,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI;AAC9B,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAClG,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,C
AAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,GAAG;AACzC;AACA,YAAY,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClG,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,GAAG;AAC5C;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG,IAAI,EAAE,IAAI,GAAG;AACpF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG,MAAM,EAAE,CAAC,GAAG;AAC3G,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AACrC,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC1E,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AAClE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AAClE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAChE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACjE;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;AACtD,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,GAAG;AAC9E,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACnE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AACpE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AACpE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAClE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AACnE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,eAAe,GAAG;AACzE,YAAY,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AACpE;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,8DAA8D;AAC1E;AACA,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC;AACvF,gBAAgB,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE;AACA,gBAAgB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC1C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACtD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,GAA
G,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC;AAChH,wBAAwB,EAAE,CAAC,gBAAgB,CAAC,CAAC,CAAC;AAC9C,4BAA4B,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,kBAAkB,EAAE;AACtH,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,mBAAmB,EAAE;AACtH,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,MAAM,CAAC;AACtC,oBAAoB,GAAG;AACvB,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,EAAE,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG;AACrF,oBAAoB,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;AACzG,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAClG,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,UAAU;AACrD,YAAY,8DAA8D;AAC1E;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK;AACnD,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,GAAG,KAAK,EAAE,OAAO,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACvF;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG;AAC3D,gBAAgB,IAAI;AACpB,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,gBAAgB,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG;AAC5D,gBAAgB,IAAI;AACpB;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AAChE,gBAAgB,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,MAAM,CAAC,CAAC,GAAG;AAChC,gBAAgB,GAAG;AACnB,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AAClE,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/C,uBAAuB,EAAE,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC;AACvC,uBAAuB,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC5D,yBAAyB,MAAM,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACnD,uBAAuB,IAAI;AAC3B;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,EAAE;AACnC,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK;AAC7G;AACA,YAAY,SAAS,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,EAAE;
AAC/C,YAAY,UAAU,CAAC,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE;AACjD;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,QAAQ;AAClD,YAAY,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACjC,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE;AAC7E,oBAAoB,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC,CAAC,EAAE;AACpD,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACrD,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC7E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,EAAE;AAClE,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACjC,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,KAAK,CAAC,EAAE,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACpD,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,KAAK,CAAC,EAAE,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACpD,oBAAoB,CAAC,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AACjH;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACtD,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC7E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACtD,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC7E;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,EAAE;AACnE,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,EAAE;AACnE,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,KAAK;AAC1B,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,KAAK,EAAE,CAAC,OAAO,EAAE;AAC7C;AACA,YAAY,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE;AACvD;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,eAAe,GAAG,SAAS,EAAE,CAAC,EAAE;AACxE,gBAAgB,CAAC,IAAI,EAAE,WAAW,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,KAAK;AACvD;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE;AAC9C,gBAAgB,CAAC,MAAM,EAAE,CAAC,GAAG;AAC7B;AACA,YAAY,YAAY,CAAC,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,gBAAgB,EAAE;AAClD;AACA,YAAY,YAAY,CAAC,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,gBAAgB,EAAE;AAClD;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE;AACnD,gBAAgB,CAAC,IAAI,CAAC,KAAK,EAAE;AAC7B,YAAY,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE;AACpC,gBAAgB,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;A
AChC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,gBAAgB,EAAE;AAClD,YAAY,MAAM,CAAC,SAAS,GAAG,MAAM,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,CAAC,EAAE,CAAC,UAAU,EAAE;AAC7E;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,SAAS;AACxB,YAAY,8DAA8D;AAC1E;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE;AAC7E,YAAY,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AACpC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,CAAC,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,YAAY,CAAC;AACb;AACA;AACA,YAAY,QAAQ,CAAC,aAAa,EAAE,CAAC,CAAC;AACtC,gBAAgB,EAAE,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,WAAW,EAAE;AAC9D,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,IAAI,EAAE,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE;AACtE,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,wBAAwB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,EAAE;AACjE,4BAA4B,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG;AAClE,wBAAw
B,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,EAAE;AACvD,4BAA4B,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE;AAC3E;AACA,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,EAAE;AACxD,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG;AAChD,4BAA4B,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,EAAE;AAC5E,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;AAChC,gBAAgB,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACpE,gBAAgB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACtE,gBAAgB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG;AAC/D,gBAAgB,aAAa,GAAG;AAChC;AACA,gBAAgB,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACtD,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,gBAAgB,CAAC;AAC7C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,oBAAoB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI;AAC1F;AACA,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,gBAAgB,CAAC;AAC7C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,oBAAoB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI;AAC3F;AACA,gBAAgB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AACtE,oBAAoB,CAAC,KAAK,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,KAAK,CAAC,CAAC;AAC7D,wBAAwB,QAAQ;AAChC,4BAA4B,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,gCAAgC,MAAM,CAAC,CAAC;AACxC,oCAAoC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC/C,oCAAoC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3E,wCAAwC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AACxG,oCAAoC,EAAE;AACtC,gCAAgC,CAAC;AACjC,4BAA4B,EAAE;AAC9B,gBAAgB,EAAE;AAClB;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AACxE,oBAAoB,CAAC,KAAK,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,KAAK,CAAC,CAAC;AACnE,2BAA2B,SAAS;AACpC,2BAA2B,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,2BAA2B,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,gCAAgC,MAAM,CAAC,CAAC;AACxC,oCAAoC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACjD,oCAAoC,WAAW,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;AAC/D,oCAAoC,WAAW,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;AAC/D,oCAAoC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC/C,oCAAoC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3E,wCAAwC,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1G,oCAAoC,EAAE;AACtC,gCAAgC,CAAC;AACjC,4BAA4B,EAAE;AAC9B,gBAAgB,EAAE;AAClB;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,
KAAK,CAAC,CAAC,CAAC,CAAC,IAAI;AAC7C,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC;AAC3D,oBAAoB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG;AACtC,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG;AACvC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,gBAAgB,CAAC,CAAC,CAAC,EAAE;AACpD;AACA,gBAAgB,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,KAAK;AAC5E;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,GAAG,QAAQ,CAAC,kBAAkB,CAAC;AACnF,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACrD,gBAAgB,aAAa,CAAC,UAAU,GAAG,QAAQ,CAAC,kBAAkB,EAAE,IAAI,CAAC,IAAI,EAAE;AACnF,gBAAgB,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,kBAAkB,EAAE,IAAI,CAAC,KAAK,EAAE;AACrF;AACA,gBAAgB,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI;AACvD,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACnD,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC7E;AACA,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,KAAK,CAAC,EAAE,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC7E,oBAAoB,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,KAAK,CAAC,EAAE,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AAC9E;AACA,gBAAgB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AACxD,gBAAgB,EAAE,EAAE,gBAAgB,CAAC,CAAC,CAAC;AACvC,oBAAoB,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AAC9E,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AAC/E,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAChD,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvF;AACA,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,WAAW,CAAC;AAC9E,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;AAC9E;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACpD,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,SAAS,EAAE;AACjD,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACpD,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,SAAS,CAAC;AAChD,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC5E;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,GAAG,QAAQ,CAAC,kBAAkB,CAAC;AAC9F,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG,UAAU,GAAG,QAAQ,CAAC,kBAAkB,CAAC;AAC9F,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC,YAAY,CAAC;AACb;AACA,YAAY,OAAO,GAAG;AACtB;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM
,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACjE,QAAQ,OAAO;AACf,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC1B,YAAY,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9D,YAAY,EAAE;AACd,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,YAAY,CAAC,MAAM,CAAC,KAAK,EAAE;AAC3B,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5B,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACxC,QAAQ,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACvC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK;AAC5B,QAAQ,EAAE;AACV,QAAQ,OAAO;AACf,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAC;AACxB,YAAY,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,gBAAgB,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7D,YAAY,EAAE;AACd,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;AACtB,YAAY,CAAC,MAAM,CAAC,KAAK,EAAE;AAC3B,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/D,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChE,QAAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MA
AM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AAC1G,QAAQ,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AAC1G,QAAQ,kBAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,IAAI;AACtH,QAAQ,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,mBAAmB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,mBAAmB,CAAC,CAAC,IAAI;AACzH;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzE,YAAY,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;AAC7E,YAAY,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;AAC/E,YAAY,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC;AAChF,YAAY,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC;AAC9E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjF,YAAY,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACtB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACtB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC1F,YAAY,EAAE,C
AAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK;AACnD,YAAY,EAAE,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,gBAAgB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC;AAChC,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChC,gBAAgB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;AAC5B;AACA,gBAAgB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC;AAChC,gBAAgB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChC,gBAAgB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;AAC5B,YAAY,CAAC;AACb,YAAY,gBAAgB,CAAC,CAAC,CAAC;AAC/B;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAClC,YAAY,MAAM,CAAC,MAAM,EAAE,KAAK,GAAG;AACnC,YAAY,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG;AAClC,YAAY,MAAM,CAAC,MAAM,EAAE,KAAK,GAAG;AACnC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE;AAC1C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC5pBF;AACA,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE;AAChC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM;AACnJ,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACzB,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK;AACzB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,UAAU,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ;AAChH,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACrG,QAAQ,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,uBAAuB,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ;AAC5H,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE;AACzJ,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAA
I,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM;AAC9C,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC;AAChE,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ;AACzE,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACpF,oBAAoB,MAAM,CAAC,IAAI,CAAC;AAChC,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,EAAE;AACd;AACA,YAAY,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;AACrD,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,wBAAwB,MAAM,CAAC,CAAC;AAChC,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,4BAA4B,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AAC7C,4BAA4B,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACtC,wBAAwB,GAAG;AAC3B,gBAAgB,IAAI;AACpB;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC1B,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE;AAC9C,oBAAoB,CAAC,MAAM,CAAC,WAAW,CAAC;AACxC,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;AAC3D,oBAAoB,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,gBAAgB,EAAE,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE;AAC7D;AACA,gBAAgB,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACnD,oBAAoB,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI;AACrE,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;AAC9C,wBAAwB,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,CAAC,iBAAiB,GAAG;AACzE,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AAC5C,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,YAAY,CAAC,OAAO;AACrF,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE;AACjE,4BAA4B,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/D,gCAAgC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC;AAClE,gCAAgC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,4BAA4B,GAAG;AAC/B,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,gBAAgB,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;AAC9B,YAAY,CAAC;AACb,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS;AACvE,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACvD,oBAAoB,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3C,
gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ;AAC/C,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,gBAAgB,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/C,wBAAwB,EAAE,CAAC,EAAE,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC,CAAC;AACtD,4BAA4B,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC/C,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACnD,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AACzC,gCAAgC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;AAC/C,gCAAgC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAC3D,4BAA4B,CAAC,CAAC,IAAI;AAClC,4BAA4B,CAAC;AAC7B,gCAAgC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC;AACxD,gCAAgC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAC3D,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB;AACA,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO;AACpF,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS;AAChH,gBAAgB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3C,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AAC1F,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrF,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,EAAE,CAAC,YAAY,EAAE;AACzE;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChF,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS;AACxE,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC,CAAC;AAC3D,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACjC,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,gBAAgB,MAAM,CAAC,MAAM,CAAC;AAC9B,YAAY,GAAG,MAAM,CAAC,MAAM,GAAG;AAC/B,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACnD;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK;AAC3H,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CA
AC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,GAAG,IAAI,EAAE,IAAI,GAAG;AACjF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC/F,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAC1D,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACjD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACxD,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACjF;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAC5E,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7C;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,WAAW;AAC5C,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE;AAC5G,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC1C,oBAAoB,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAClC,wBAAwB,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAC;AAC7E,4BAA4B,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE;AAC5C,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,IAAI,CAAC;AAChC,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE
,MAAM,EAAE,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,cAAc,CAAC,KAAK,CAAC;AACrC,gBAAgB,cAAc,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,oBAAoB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3E,oBAAoB,MAAM,CAAC,KAAK,CAAC;AACjC,gBAAgB,GAAG;AACnB,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AACjE,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnE,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtE,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE;AACpD;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,GAAG,EAAE;AACtD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACtG,YAAY,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACvD,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,GAAG;AACjH,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,wBAAwB,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxG,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;AAC/G,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC;AACtC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;AACnI,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE;AACrG,gBAAgB,CAAC;AACjB,YAAY,IAAI;AAChB,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE;AAC1E,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC3E,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI;AACnG,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,C
AAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACvC,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AAC7D,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACxC,wBAAwB,OAAO,CAAC,CAAC,OAAO;AACxC,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,GAAG;AACnB,YAAY,IAAI;AAChB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,GAAG;AAC7G,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE;AACjG;AACA,YAAY,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC3B,gBAAgB,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG;AAC/E,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,EAAE,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,QAAQ,GAAG,CAAC,EAAE;AACvM,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,EAAE,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,QAAQ,GAAG,CAAC,GAAG;AAC1M,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;AAC9B,gBAAgB,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE;AACtF,oBAAoB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC;AACpE,oBAAoB,GAAG;AACvB,YAAY,EAAE,CAAC,CAAC,OAAO,EAAE;AACzB,gBAAgB,YAAY;AAC5B,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAA
E,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,wBAAwB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,wBAAwB,EAAE,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM;AAC/E,wBAAwB,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACpD,4BAA4B,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC3C,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,gCAAgC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC;AACpC,gCAAgC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9D,oCAAoC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gCAAgC,CAAC,CAAC,IAAI,CAAC,CAAC;AACxC,oCAAoC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9D,gCAAgC,CAAC;AACjC,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,IAAI,CAAC;AACpC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACpD,4BAA4B,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE;AAChF,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACnF,wBAAwB,CAAC;AACzB,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACnD,4BAA4B,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AAC3E,4BAA4B,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,EAAE;AACnE,gCAAgC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,IAAI,iBAAiB,CAAC,CAAC,EAAE;AACzG,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,KAAK,CAAC;AACrC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnD,wBAAwB,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACpD,4BAA4B,MAAM,CAAC,CAAC,CAAC,SAAS,GAAG;AACjD,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AACjF,4BAA4B,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,iBAAiB,EAAE;AAC5E,4BAA4B,EAAE,CAAC,SAAS,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC/F,4BAA4B,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS;AACjD,4BAA4B,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACpE,gCAAgC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,IAAI,iBAAiB,CAAC,CAAC,EAAE;AAC5E,4BAA4B,CAAC;AAC7B,4BAA4B,MAAM,CAAC,KAAK,CAAC;AACzC,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,CAAC;AAClB,gBAAgB,YAAY;AAC5B,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,wBAAwB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AACtE,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AAC/D,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,wBAAwB,MAAM,
CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gCAAgC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,4BAA4B,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9E,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC,YAAY;AACnF,YAAY,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;AAC5C,gBAAgB,eAAe,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC;AACxD,YAAY,CAAC;AACb;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,SAAS,GAAG;AACpD;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC9E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,C
AAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC5E,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,YAAY,EAAE,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AAChG,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACvD,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC7aF,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,EAAE;AACvC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,oBAAoB,EAAE;AACtD,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACvC,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,UAAU,CA
AC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK;AAC/E,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AAC3E,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,KAAK;AACzC,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,aAAa;AAC7E;AACA,IAAI,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE;AAC5B,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE;AACzB,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC;AACvB,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC;AAC1B,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AAC7C,IAAI,CAAC;AACL,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACrD,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,GAAG;AACtC,IAAI,CAAC;AACL;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,gBAAgB,CAAC,OAAO;AAC5B,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAChE,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,gBAAgB,CAAC,OAAO;AAC5B,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAChE,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,gBAAgB,CAAC,OAAO;AAC5B,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC
;AACxB;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AACrE,gBAAgB,OAAO,CAAC,CAAC,OAAO;AAChC,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC;AAC5C,gBAAgB,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,QAAQ,EAAE;AACrC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnC,oBAAoB,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE;AAC1C,gBAAgB,IAAI;AACpB,oBAAoB,SAAS,CAAC,UAAU,EAAE;AAC1C,wBAAwB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AAC3C,wBAAwB,CAAC,IAAI,CAAC,KAAK,EAAE;AACrC,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,gBAAgB;AACpD,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACjE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,GAAG;AAClC,YAAY,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,GAAG;AAClC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAA
C,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG,IAAI,EAAE,IAAI,GAAG;AAC3F,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG,MAAM,EAAE,CAAC,GAAG;AAClH,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG;AAChE,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,YAAY,IAAI;AAC9D;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AACjG,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,QAAQ;AACvB,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,iBAAiB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,GAAG,SAAS,MAAM,MAAM,GAAG;AACtE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,EAAE;AAC9F,oBAAoB,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;AAC9F,gBAAgB,EAAE;AAClB;AACA,gBAAgB,QAAQ,CAAC,KAAK,CAAC,YAAY,IAAI,KAAK,IAAI,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,IAAI;AAC/E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,EAAE;AAC5C,oBAAoB,CAAC,KAAK,CAAC,YAAY,CAAC;AACxC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE;AACpC,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,QAAQ;AACpB,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG;AAChF,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KA
AK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,IAAI;AACvE;AACA;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,EAAE;AACnD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI;AACxE;AACA,YAAY,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE;AACpC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC5E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,EAAE,CAAC,GAAG;AAC1E;AACA,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,IAAI,EAAE;AAC5C,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;AACxC;AACA,gBAAgB,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AACpC,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AAChE,oBAAoB,EAAE;AACtB;AACA,oBAAoB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE;AAChF,oBAAoB,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG;AACjC,oBAAoB,MAAM;AAC1B,wBAAwB,CAAC,SAAS,EAAE,IAAI,EAAE;AAC1C,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,4BAA4B,MAAM,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,GAAG;AAC5F,wBAAwB,GAAG;AAC3B;AACA,oBAAoB,GAAG,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,EAAE,MAAM,CAAC;AACxG,oBAAoB,CAAC,CAAC,SAAS,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AACpE,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,4BAA4B,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS,EAAE;AACzH,wBAAwB,GAAG;AAC3B,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACjC,oBAAoB,CAAC,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE;AAC7C,wBAAwB,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,SAAS,GAAG;AAC1E,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,YAAY,CAAC;AACjC,oBAAoB,MAAM;AAC1B,wBAAwB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,4BAA4B,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AACvG,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,IAAI,EAAE;AAChD,wBAAwB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AAC7C;AACA,gBAAgB,EAAE,CAAC,YAAY,CAAC;AAChC,oBAAoB,MAAM;AAC1B,wBAAwB,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE;AAChD,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,C
AAC,CAAC,CAAC,GAAG;AAC9E,wBAAwB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AAClF;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AAC3E,oBAAoB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AACzC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK;AACtC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/D,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AACxC,gBAAgB,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,GAAG;AACnB,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAChC,oBAAoB,IAAI,CAAC,CAAC,OAAO,EAAE;AACnC,oBAAoB,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC;AAC/C,wBAAwB,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE;AAChD,wBAAwB,KAAK,CAAC;AAC9B,oBAAoB,IAAI,CAAC,CAAC,OAAO,EAAE;AACnC,oBAAoB,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC;AAC/C,wBAAwB,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE;AAC/C,wBAAwB,KAAK,CAAC;AAC9B,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,GAAG;AACnD,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACvD,oBAAoB,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,EAAE;AAChD,oBAAoB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AAC9C,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACxC,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB,CAAC,
QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9E,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AAC3D;AACA,oBAAoB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AACtF,oBAAoB,IAAI;AACxB,wBAAwB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,4BAA4B,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,4BAA4B,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AACpD,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,4BAA4B,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,WAAW,CAAC;AAC1E;AACA,4BAA4B,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;AAClE,4BAA4B,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AAC5D;AACA,4BAA4B,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7C,4BAA4B,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AAC/E,4BAA4B,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM;AACvF,4BAA4B,OAAO,CAAC,IAAI,EAAE;AAC1C,gCAAgC,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAChD,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AACpE,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;AACxE,gCAAgC,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC;AAC/D,4BAA4B,GAAG;AAC/B,wBAAwB,GAAG;AAC3B;AACA,oBAAoB,gBAAgB,CAAC,OAAO;AAC5C,wBAAwB,CAAC,IAAI,EAAE;AAC/B,4BAA4B,KAAK,CAAC,CAAC,MAAM,CAAC;AAC1C,4BAA4B,KAAK,CAAC,CAAC,UAAU,CAAC;AAC9C,4BAA4B,MAAM,CAAC,CAAC,OAAO;AAC3C,wBAAwB,KAAK;AAC7B;AACA,oBAAoB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACrE,gBAAgB,GAAG;AACnB;AACA,gBAAgB,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5E,oBAAoB,gBAAgB,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC1D,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,CAAC;AAClB,gBAAgB,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChF,oBAAoB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACpD,oBAAoB,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACrC,wBAAwB,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC;AAC1C,wBAAwB,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACnD,wBAAwB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK;AACxC,oBAAoB,EAAE;AACtB,oBAAoB,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACpD,gBAAgB,GAAG;AACnB;AACA,gBAAgB,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,gBAAgB,GAAG;AACnB;AACA,gBAAgB,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChF,oBAAoB,OAAO,GAAG;AAC9B,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,SAAS,GAAG;AACzD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAA
E,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACpG,QAAQ,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACvG,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,GAAG,CAAC,IAAI;AAC/F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,QAAQ,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACxC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;A
AC9D,QAAQ,GAAG;AACX,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjF,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjC,YAAY,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,GAAG,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,KAAK;AAC3F,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE;AAC7C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC/fF;AACA,EAAE,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC3C,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE;AAChC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACjD,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC,MAAM;AACnJ,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AACrG,QAAQ,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,uBAAuB,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ;AAC5H,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK;AACzB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;AAC3B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE;AACzJ,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAA
C,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM;AAC/C,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC;AACxB,gBAAgB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE;AACxC,oBAAoB,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;AAC3D,oBAAoB,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,gBAAgB,CAAC,IAAI,EAAE;AACvB;AACA,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS;AACvE,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACvD,oBAAoB,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3C,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ;AAC/C,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC;AACxB,gBAAgB,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AACrC,4BAA4B,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACpD,4BAA4B,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACvD,wBAAwB,CAAC,CAAC,IAAI;AAC9B,wBAAwB,CAAC;AACzB,4BAA4B,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;AAC3C,4BAA4B,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACvD,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO;AACpF,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS;AAChH,gBAAgB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACjF,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrF,gBAAgB,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,EAAE,CAAC,YAAY,EAAE;AAC1E;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAA
M,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG;AAC1J;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC;AACvC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtI,YAAY,IAAI;AAChB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG;AACvD;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,IAAI,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC/E;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG,IAAI,EAAE,IAAI,GAAG;AACjG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,kBAAkB,GAAG;AACzG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAC1D,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAC5E,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7C,YAAY,MAAM,CAAC,IAAI,GAAG,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE;AACzF,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5C,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AACjE,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnE,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtE,YAAY,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE;AAC7E,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE;AACpD;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACnD,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACvD,YAAY,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,
SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACpD,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AAChJ,gBAAgB,GAAG;AACnB;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;AACjC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7E;AACA,YAAY,IAAI;AAChB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI;AACnG,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACvC,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AAC7D,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACxC,wBAAwB,OAAO,CAAC,CAAC,OAAO;AACxC,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AAC5D,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,KAAK,CAAC,eAAe,GAAG;AAC/C,gBAAgB,GAAG;AACnB;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;AACrC,gBAAgB,SAAS,CAAC,MAAM,EAAE,QAAQ,GAAG;AAC7C;AACA,gBAAgB,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE;AACvC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,4BAA4B,CAAC,CAAC,GAAG,CAA
C,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5F,wBAAwB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,GAAG;AACtF,wBAAwB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC7E,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,GAAG,GAAG;AACzH,wBAAwB,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,GAAG,IAAI,EAAE,CAAC,GAAG;AAC1F,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACpF,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AAC1G,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrC;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AACzC,gBAAgB,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AAClG,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AACxC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,wBAAwB,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC;AAC/C,4BAA4B,MAAM,CAAC,CAAC,CAAC;AACrC,wBAAwB,EAAE,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC;AACzC,4BAA4B,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,GAAG;AACzE,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI;AAC/G,oBAAoB,GAAG;AACvB,gBAAgB,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,IAAI,EAAE;AAC7E,oBAAoB,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrG,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,IAAI,CAAC,SAAS,EAAE,IAAI,GAAG,IAAI,KAAK;AAChD,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC5C,gBAAgB,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,EAAE;AACtE,gBAAgB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAChD,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,
CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAClG,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AACxC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9D,gBAAgB,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,IAAI,EAAE;AAC7E,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAChD,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtG,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,CAAC;AAClB,gBAAgB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,GAAG,IAAI,KAAK;AAC7D,YAAY,CAAC;AACb;AACA,YAAY,IAAI;AAChB,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,GAAG;AAC7G;AACA,YAAY,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC3B,gBAAgB,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG;AAC/E,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,EAAE,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,QAAQ,GAAG,CAAC,EAAE;AACvM,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,EAAE,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,QAAQ,GAAG,CAAC,GAAG;AAC1M,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC;AACxB,gBAAgB,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,IAAI,EAAE;AAC7E,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG;AAChF,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC;AAC3E,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,EAAE;AACpD,YAAY,IAAI;AAChB,gBAAgB,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,IAAI,EAAE;AAC7E,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,wBAAwB,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC;AACvF,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AAC7C,4BAA4B,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,C
AAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACjE,4BAA4B,CAAC,CAAC,GAAG,CAAC,CAAC;AACnC,4BAA4B,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;AACnE,gCAAgC,CAAC;AACjC,gCAAgC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC9C,4BAA4B,CAAC,CAAC,GAAG;AACjC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,MAAM,EAAE,IAAI,EAAE;AACnC,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACjE,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC7E,oBAAoB,GAAG;AACvB;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,kBAAkB,CAAC,SAAS,GAAG;AAC9D,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC9E,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC5E,QAAQ,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACtE,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;AACtD,QAAQ,EAAE,aAAa,CAAC,IAAI,CAAC
,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACzG,QAAQ,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACzF,QAAQ,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC7E,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACvD,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACzWF;AACA,EAAE,CAAC,MAAM,CAAC,uBAAuB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChD,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,kBAAkB,EAAE;AACjD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,CAAC;AAChD,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,CAAC;AAClD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAA
C,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK;AACzB,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,GAAG,SAAS,EAAE;AAC1E,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,aAAa;AAC9E;AACA,IAAI,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE;AAC9B;AACA,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,IAAI,EAAE;AACvB,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC;AACvB,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC;AAC1B,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AAC7C,IAAI,CAAC;AACL,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE;AACzB,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,GAAG;AACtC,IAAI,CAAC;AACL;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AACrE,gBAAgB,OAAO,CAAC,CAAC,OAAO;AAChC,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC;AAC5C,gBAAgB,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,QAAQ,EAAE;AACrC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eA
Ae,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;AAChG,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,GAAG;AACzC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,gBAAgB;AACpD,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,GAAG;AAClC,YAAY,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,GAAG,KAAK,CAAC,IAAI,EAAE;AAC9C;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,uBAAuB,GAAG,IAAI,EAAE,IAAI,GAAG;AAChG,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,uBAAuB,GAAG,MAAM,EAAE,CAAC,GAAG;AACvH,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AAC5D,gBAAgB,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE;AACzD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG;AAChE;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,YAAY,IAAI;AAC9D;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC
,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC1C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AACjG,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,QAAQ;AACvB,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,iBAAiB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,GAAG,SAAS,MAAM,MAAM,GAAG;AACtE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,EAAE;AAC9F,oBAAoB,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;AAC9F,gBAAgB,EAAE;AAClB;AACA,gBAAgB,QAAQ,CAAC,KAAK,CAAC,YAAY,IAAI,KAAK,IAAI,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,IAAI;AAC/E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,EAAE;AAC5C,oBAAoB,CAAC,KAAK,CAAC,YAAY,CAAC;AACxC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK;AAC3E,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE;AACpC,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,QAAQ;AACpB,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG;AAChF,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,IAAI;AACvE;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,EAAE;AACnD,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI;AACxE;AACA,YAAY,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,QAAQ,EAAE;AACjD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,EAAE;AAClD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE;AACtD;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,SAAS,EAAE,CAAC,GAAG;AACtE;AACA,gBAAgB,MAAM;AACtB,oBAAoB,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,IAAI,GAAG;AAC7C,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;AACpD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AAC/E,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE;AACtD,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,IAAI;AACxB,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC
jC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACjC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,eAAe,CAAC;AAC7C,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AACxC,gBAAgB,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,GAAG;AACnB,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAChC,oBAAoB,IAAI,CAAC,CAAC,OAAO,EAAE;AACnC,oBAAoB,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC;AAC/C,wBAAwB,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE;AAChD,wBAAwB,KAAK,CAAC;AAC9B,oBAAoB,IAAI,CAAC,CAAC,OAAO,EAAE;AACnC,oBAAoB,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC;AAC/C,wBAAwB,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE;AAC/C,wBAAwB,KAAK,CAAC;AAC9B,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,GAAG;AACnD,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,GAAG;AAC7C;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACvD,oBAAoB,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,EAAE;AAChD,oBAAoB,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AAC9C,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACxC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,SAAS,GAAG;AACrE,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,QAAQ,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACxC,QAAQ,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9B,YAAY,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACvC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK;AAC5B,QAAQ,EAAE;AACV,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,Q
AAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACpG,QAAQ,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,QAAQ,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACxC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,E
AAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjF,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjC,YAAY,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,GAAG,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,KAAK;AAC3F,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE;AAC7C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC1XF,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACnC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE;AAC5D,QAAQ,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,GAAG;AACxC,QAAQ,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACrB,QAAQ,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,QAAQ,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1B,QAAQ,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AACtB,QAAQ,QAAQ,CAAC;AACjB,QAAQ,QAAQ,CAAC;AACjB,QAAQ,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1C,QAAQ,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACzC,QAAQ,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAC/B,QAAQ,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1B,QAAQ,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,oBAAoB,GAAG;AACrD,QAAQ,uBAAuB,CAAC,CAAC,CAAC,KAAK,CAAC;AACxC,QAAQ,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,GAAG;AAC9C,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG;AAC9B,QAAQ,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG;AACpC,QAAQ,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG;AACpC;AACA,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AACrE,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AACrE;AACA,QAAQ,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AAC3E,QAAQ,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AAC3E;AACA,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AACvF,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AACvF;AACA,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AAC5E,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,QAAQ,EAAE;AAC5E;AACA,QAAQ,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAAE;AAC7F,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,GAAG,QAAQ,CAAC,QAAQ,EAAE;AACnF,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,GAAG,KAAK,CAAC,OAAO,EAAE,MAAM,EAAE,KAAK,GAAG,QAAQ,CAAC,QAAQ,EAAE;AACpF;AACA,QAAQ,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,EAAE;AAC/C,QAAQ,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACtC,QAAQ,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AACjC;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,CAAC,K
AAK,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AAC9E,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AACtG,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AACtG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAChG;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACjE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE;AACvF,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE;AACvF,gBAAgB,CAAC,GAAG,CAA
C,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,oBAAoB,EAAE;AACtB,gBAAgB,GAAG;AACnB;AACA,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACjG,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,GAAG;AAC5C;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG,IAAI,EAAE,IAAI,GAAG;AAC7E,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,GAAG,MAAM,EAAE,CAAC,GAAG;AACpG;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,SAAS,GAAG;AAC1D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,SAAS,GAAG;AAC1D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,aAAa,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,aAAa,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,UAAU,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D;AACA,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,gBAAgB,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACpD,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AAChE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC;AACvF,gBAAgB,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE;AACA,gBAAgB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC1C,gBAAgB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC1C;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACvC,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AACtD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC;AAChH,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,mBAAmB,EAAE;AACzG,wBAAwB,MAAM,CAAC,MAAM,CAAC;AACtC,oBAAoB,GAAG;AACvB,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AA
C1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACvC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAClG,YAAY,CAAC;AACb;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK;AACtI,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK;AACtI,YAAY,SAAS;AACrB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,KAAK;AACzI,YAAY,SAAS;AACrB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,KAAK;AACzI,YAAY,KAAK;AACjB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,KAAK;AACrI,YAAY,KAAK;AACjB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,KAAK;AACrI,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK;AACtI,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC;AACzC,gBAAgB,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,KAAK;AACtI;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACrF;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACpD,gBAAgB,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,C
AAC,QAAQ,IAAI;AAC3E,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,aAAa,EAAE;AAC1D,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC9E,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,SAAS,EAAE;AAClD,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC1E,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACpD,gBAAgB,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC3E,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACpD,gBAAgB,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC3E,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,aAAa,EAAE;AAC1D,gBAAgB,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC9E,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,SAAS,EAAE;AAClD,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC1E,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,UAAU,EAAE;AACpD,gBAAgB,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,IAAI;AAC3E;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AACpH,gBAAgB,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACtF,YAAY,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AACzC,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AACpH,gBAAgB,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACtF,YAAY,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AACzC;AACA,YAAY,OAAO,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE,MAAM,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtH,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,eAAe,GAAG;AAC7C;AACA,YAAY,OAAO,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE,MAAM,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtH,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,eAAe,GAAG;AAC7C;AACA,YAAY,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC7C,YAAY,SAAS,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAChD,YAAY,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC5C,YAAY,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC7C;AACA,YAAY,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC7C,YAAY,SAAS,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAChD,YAAY,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC5C,YAAY,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI;AAC7C;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,GAAG;AAC1E,YAAY,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,GAAG;AAC1E;AACA,YAAY,EAAE,CAAC,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,GAAG;AACvE,YAAY,EAAE,CAAC,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,GAAG;AACvE;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,MA
AM,EAAE,EAAE,CAAC,UAAU,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,GAAG;AAC1E,YAAY,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,UAAU,EAAE,IAAI,CAAC,MAAM,GAAG;AAC1E;AACA,YAAY,EAAE,CAAC,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,aAAa,EAAE,IAAI,CAAC,SAAS,GAAG;AACnF,YAAY,EAAE,CAAC,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,UAAU,CAAC,aAAa,EAAE,IAAI,CAAC,SAAS,GAAG;AACnF;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,gBAAgB,CAAC,QAAQ,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE;AAC/C;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACrC,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AAC3E,YAAY,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACpD,gBAAgB,CAAC,IAAI,CAAC,KAAK,EAAE;AAC7B;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AAC/C;AACA;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AACrD,gBAAgB,CAAC,IAAI,CAAC,MAAM,EAAE;AAC9B;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AAC/C;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,GAAG;AACrD,gBAAgB,CAAC,IAAI,CAAC,MAAM,EAAE;AAC9B;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,OAAO,EAAE,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACtE,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AACxE;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,OAAO,EAAE,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AACtE,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AACxE;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,uBAAuB,EAAE;AACxC,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/D,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW;AACzC,YAAY,8DAA8D;AAC1E;AACA,YAAY,QAAQ,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1C,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChF,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACxC,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC9B,oBAAoB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACvC,oBAAoB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC;AAC3C,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG;AACvC,gBAAgB,EAAE;AAClB,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAChC,oBAAoB,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,qBAAqB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBA
AwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,KAAK,EAAE;AACnC,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7C,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChF,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACxC,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC9B,oBAAoB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACvC,oBAAoB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC;AAC3C,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG;AACvC,gBAAgB,EAAE;AAClB,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;AAClC,oBAAoB,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,qBAAqB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,KAAK,EAAE;AACnC,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3C,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChF,gBAAgB,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AACvD,gBAAgB,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AACvD,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAChC,oBAAoB,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,qBAAqB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,KAAK,EAAE;AACnC,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC;AACzC,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAChF;AACA,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AAChD,gBAAgB,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAoB,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AAC/C,oBAAoB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AACrC,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG;AACrC,gBAAgB,EAAE;AAClB,gBAAgB,OAAO;AACvB,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;AAChC,oBAAoB,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,qBAAqB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACxD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,GAAG,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,KAAK,EAAE;AACnC,YAAY,CAAC;AACb;AACA;AACA;AACA,YAAY,QAAQ,CAAC,eAAe,EAAE,CAAC,CAAC;AACxC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC1D,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE;AACtC,gBAAgB,GAAG,CAAC,CAAC;AACrB,kBAAkB,KAAK,CAAC,eAAe,GAAG;AAC1C,gBAAgB,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG;AAC5B,cAAc,CAAC;AACf,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;AAC/D,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,
CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC1D,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE;AACtC,gBAAgB,GAAG,CAAC,CAAC;AACrB,kBAAkB,KAAK,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;AAClE,gBAAgB,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG;AAC5B,cAAc,CAAC;AACf,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,uBAAuB,EAAE;AACxC,gBAAgB,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9E,oBAAoB,eAAe,GAAG;AACtC,oBAAoB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AAC9E,oBAAoB,IAAI;AACxB,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAChD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG;AAChD,wBAAwB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChF,4BAA4B,MAAM,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AAC9F,wBAAwB,GAAG;AAC3B;AACA,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,aAAa,CAAC,UAAU,EAAE;AAC9D,wBAAwB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AACvE,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AACnD,4BAA4B,cAAc,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE;AAChE,wBAAwB,CAAC;AACzB,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AACxD,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,wBAAwB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AAC1G,wBAAwB,OAAO,CAAC,IAAI,EAAE;AACtC,4BAA4B,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,KAAK,CAAC,CAAC,WAAW,CAAC;AAC/C,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;AACpE,4BAA4B,IAAI,CAAC,CAAC,KAAK,CAAC;AACxC,4BAA4B,KAAK,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM;AACtE,wBAAwB,GAAG;AAC3B,oBAAoB,GAAG;AACvB;AACA,oBAAoB,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,CAAC;AACrD,wBAAwB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE;AACzE,oBAAoB,EAAE;AACtB;AACA,oBAAoB,gBAAgB,CAAC,OAAO;AAC5C,wBAAwB,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,4BAA4B,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5D,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,cAAc,CAAC,gBAAgB,CAAC,OAAO,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,qBAAqB,CAAC;AAC3G,wBAAwB,CAAC,IAAI,EAAE;AAC/B,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,UAAU,CAAC,EAAE;AACvE,4BAA4B,KAAK,CAAC,CAAC,UAAU,CAAC;AAC9C,4BAA4B,MAAM,CAAC,CAAC,OAAO;AAC3C,wBAAwB,KAAK;AAC7B;AACA,oBAAoB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACrE,gBAAgB,GAAG;AACnB;AACA,gBAAgB,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5E,oBAAoB,eAAe,GAAG;AACtC,gBAAgB,GAAG;AACnB,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,cAAc,EAAE;AAC/E,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,cAAc,EAAE;AAC/E,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,C
AAC;AAC7E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB;AACA,gBAAgB,SAAS,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,iBAAiB,EAAE;AACrF,gBAAgB,SAAS,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,iBAAiB,EAAE;AACrF,gBAAgB,SAAS,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChF,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB,gBAAgB,SAAS,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAChF,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB;AACA,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,eAAe,EAAE;AAChF,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,eAAe,EAAE;AAChF,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB,gBAAgB,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AACxC,gBAAgB,GAAG;AACnB;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,aAAa,EAAE;AAC7E,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,aAAa,EAAE;AAC7E;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,gBAAgB,GAAG;AACnB,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5E,oBAAoB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,gBAAgB,GAAG;AACnB,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,oBAAoB,OAAO,GAAG;AAC9B,gBAAgB,GAAG;AACnB,gBAAgB,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7E,oBAAoB,OAAO,GAAG;AAC9B,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO;AACjC,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;AAChC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;AAChC,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAA
C,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AAC1F,QAAQ,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxF,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,mBAAmB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,mBAAmB,CAAC,CAAC,IAAI;AACzH;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3B,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3B,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3B,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3B,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACxB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC3E,YAAY,UAAU,CAAC,CAAC,CAAC;AACzB,YAAY,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE;AACjC,YAAY,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE;AACjC,YAAY,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE;AACjC,YAAY,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE;AACjC,QAAQ,GAAG;AACX;AACA,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE;AAC1C,gBAAgB,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE;AACzC,gBAAgB,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE;AAC1C,gBAAgB,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE;AACzC,gBAAgB,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE;AAC1C,
gBAAgB,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE;AACzC,gBAAgB,MAAM,CAAC,WAAW,CAAC,KAAK,EAAE;AAC1C,gBAAgB,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE;AACzC,gBAAgB,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE;AAC7C,gBAAgB,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE;AAC7C,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,OAAO,CAAC,QAAQ,CAAC,KAAK,EAAE;AAClH,cAAc,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,YAAY,GAAG;AACf,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACrlBF;AACA,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACjD,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;AACnD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACjD,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,OAAO,KAAK,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK;AACtI,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACzB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK;AAC7B,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE;AACvL,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,
CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC;AAC9B,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClE;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,GAAG;AACrF;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC;AACxB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG;AAC1J,YAAY,IAAI;AAChB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACvE;AACA,YAAY,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;AACjC,oBAAoB,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,GAAG;AACtE,oBAAoB,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE;AACtE,gBAAgB,CAAC;AACjB,YAAY,EAAE,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACpD;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK;AAC3H,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,GAAG,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE,MAAM,GAAG;AAChG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,GAAG;AAC9F,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AACzD;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK
;AACxF;AACA,YAAY,SAAS;AACrB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,QAAQ,CAAC,UAAU,EAAE;AACzC,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACtC,wBAAwB,EAAE,CAAC,CAAC,EAAE;AAC9B,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACvD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AAC9D,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AAC1F;AACA,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,GAAG,SAAS,GAAG,EAAE,CAAC,IAAI,EAAE;AACtE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAChD,YAAY,KAAK,CAAC,IAAI,GAAG,MAAM,GAAG;AAClC;AACA,YAAY,KAAK,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/J,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,oBAAoB,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACpC,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1C,4BAA4B,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9C,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,wBAAwB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,CAAC,CAAC,EAAE,EAAE,CAAC;AAC/B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,GAAG,EAAE;AACjC,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG;AAC5D,wBAAwB,CAAC,CAAC,CAAC,EAAE,EAAE;AAC/B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3C,4BAA4B,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AAC7C,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,EAAE,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,CAAC,CAAC,EAAE,EAAE,EAAE;AAChC,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE;AACvH,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE;AACjE,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE;AACnE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAC/E,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,
CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACxF;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,GAAG,CAAC,SAAS;AAC7D,YAAY,KAAK,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5H,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC;AAChC,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE;AACvH,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,oBAAoB,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE;AACpC,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1C,4BAA4B,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9C,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,wBAAwB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,CAAC,CAAC,EAAE,EAAE,CAAC;AAC/B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,GAAG,EAAE;AACjC,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACzC,4BAA4B,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9C,wBAAwB,CAAC,CAAC,CAAC,EAAE,EAAE;AAC/B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3C,4BAA4B,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AAC7C,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,EAAE,CAAC;AAC/B,wBAAwB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,CAAC,CAAC,EAAE,EAAE,EAAE;AAChC,gBAAgB,GAAG;AACnB,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA;AACA,IAAI,EAAE,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;AAC5E,IAAI,KAAK,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC9D,QAAQ,KAAK,CAAC,eAAe,GAAG;AAChC,QAAQ,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC;AAChE,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,WAAW,CAAC;AAC1C,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACxC,QAAQ,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE;AACtD,YAAY,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,CAAC;AACpC,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC/E,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,
CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACvE,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AACvE,QAAQ,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACnF,QAAQ,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACnF,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACjF,QAAQ,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACnF,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACrF,QAAQ,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AACzE,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC9F;AACA,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC1E,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC1E,QAAQ,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACpF,QAAQ,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAChF,QAAQ,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAC9E;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC1E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACtOF,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,M
AAM,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC;AACzD,EAAE,CAAC,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,CAAC,OAAO;AACzC,EAAE,CAAC,MAAM,CAAC,mBAAmB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC5C,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACzD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI;AAC/B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,IAAI;AAChC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,EAAE;AAChC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChB,QAAQ,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC;AACnD,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,EAAE;AAChC,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AAC7B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAE;AACtC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AACzB,QAAQ,CAAC,CAAC,UAAU;AACpB,QAAQ,CAAC,CAAC,UAAU;AACpB,QAAQ,CAAC,CAAC,UAAU;AACpB,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AAC9B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AAC9B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,aAAa,EAAE;AAC9M,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC/E,YAAY,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAClF;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,WAAW,EAAE,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC;AAC1D,YAAY,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/C,gBAAgB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG;AACjC,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;AACrC,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE;AACjD,wBAAwB,GAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACtF,wBAAwB,OAAO,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG;AACnE,gBAAgB,GAAG;AACnB,gBAAgB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC;AAC/B,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,GAAG;AACtE,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC
;AAC9B,YAAY,EAAE,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI;AACvD,YAAY;AACZ,YAAY,cAAc,CAAC,CAAC,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,GAAG,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AACtJ,YAAY,iBAAiB,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG;AAC3F,YAAY;AACZ,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE,MAAM,CAAC,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI;AAChH;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC;AAC/D,YAAY,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC;AAC1E,YAAY,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC,GAAG;AACvC,YAAY,GAAG,CAAC,wBAAwB,CAAC,CAAC,CAAC,KAAK,CAAC;AACjD,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,YAAY;AACZ,YAAY,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACnF,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE;AACpC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE;AACpC,gBAAgB,GAAG,CAAC,mBAAmB,CAAC,CAAC,CAAC,KAAK,CAAC;AAChD,gBAAgB,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;AAClF,gBAAgB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC;AAC/C,oBAAoB,mBAAmB,CAAC,CAAC,CAAC,IAAI,CAAC;AAC/C,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5B,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK;AACvD,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AAClC,oBAAoB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC;AACjB,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG;AAClF,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,oBAAoB,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;AAC5E,oBAAoB,EAAE,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC;AAC9C,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,GAAG,CAAC,EAAE;AAC/C,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,GAAG,CAAC,EAAE;AAC/C,oBAAoB,CAAC;AACrB,wBAAwB,EAAE,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC;AACtF,oBAAoB,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC;AAChE,wBAAwB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1E,wBAAwB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1E,oBAAoB,CAAC;AACrB,wBAAwB,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,
CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC;AACnG,oBAAoB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC;AAC3C,wBAAwB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1E,wBAAwB,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,GAAG,CAAC,EAAE;AAChE,wBAAwB,wBAAwB,CAAC,CAAC,CAAC,IAAI,CAAC;AACxD,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC;AAChJ,gBAAgB,EAAE,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,CAAC;AAC9E,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AACxC,oBAAoB,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE;AACvC,oBAAoB,CAAC,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC9D;AACA,gBAAgB,uBAAuB,CAAC,CAAC,CAAC,GAAG;AAC7C,gBAAgB,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,UAAU,EAAE,CAAC,UAAU,EAAE,EAAE,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,EAAE;AAC7H,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,mBAAmB,GAAG,IAAI,EAAE,IAAI,GAAG;AAC5F,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,mBAAmB,GAAG;AAC1G,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,mBAAmB,CAAC,UAAU,GAAG;AAClF,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,mBAAmB,CAAC,UAAU,GAAG;AAClF,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,mBAAmB,CAAC,iBAAiB,GAAG;AACzF;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,OAAO,CAAC,WAAW,EAAE;AAC9D,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,EAAE;AAC7C,wBAAwB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,SAAS,CAAC;AACnD,wBAAwB,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC;AAC7C,wBAAwB,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,OAAO,EAAE;AAChD;AACA,YAAY,EAAE,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AAC/D,YAAY,GAAG,CAAC,iBAAiB,CAAC,CAAC,qBAAqB,CAAC;AACzD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;AACnD,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACrD,YAAY,EAAE,CAAC,EAAE,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;AAC/B,gBAAgB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,EAAE;AACrH,gBAAgB,iBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,iBAAiB,GAAG,SAAS,EAAE,IAAI,GAAG,IAAI,EAAE,QAAQ,GAAG;AACzG,gBAAgB,iBAAiB,CAAC,KAAK,GAAG,MAAM,EAA
E,IAAI,GAAG;AACzD,gBAAgB,iBAAiB,CAAC,IAAI,GAAG,MAAM,GAAG;AAClD,gBAAgB,iBAAiB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1E,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACjE,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACjE,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAClE,IAAI;AACJ,gBAAgB,EAAE,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI;AAC9E,gBAAgB,qBAAqB,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,iBAAiB,GAAG,SAAS,EAAE,IAAI,GAAG,IAAI,EAAE,oBAAoB,GAAG;AACzH,gBAAgB,qBAAqB,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,oBAAoB,GAAG;AAClF,gBAAgB,qBAAqB,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG;AAC7D,gBAAgB,qBAAqB,CAAC,IAAI,GAAG,MAAM,GAAG;AACtD,gBAAgB,qBAAqB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,eAAe,CAAC;AAChE,wBAAwB,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AAC5H,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAClE,wBAAwB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG;AACzD,YAAY,CAAC;AACb,YAAY,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC;AACrD,YAAY,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,UAAU,GAAG,SAAS,EAAE,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE;AACjF,YAAY,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG;AAC9C,YAAY,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACvC,YAAY,UAAU,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACvC;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC;AACnD,YAAY,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,UAAU,GAAG,SAAS,EAAE,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE;AACjF,YAAY,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AAC7C,YAAY,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACvC,YAAY,UAAU,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,gBAAgB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW,GAAG;AACvF,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACpF,YAAY,UAAU,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE;AAClI,gBAAgB,QAAQ,CAAC,gBAAgB,EAAE;AAC3C,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAClC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACrC,oBAAoB,UAAU,CAAC,CAAC,iBAAiB;AACjD,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG;AACf,YAAY,UAAU,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACjI,gBAAgB,QAAQ,CAAC,eAAe,EAAE;AAC1C,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAClC,oBAAoB,KAAK,CAAC,CAAC,CAAC;AAC5B,gB
AAgB,GAAG;AACnB,YAAY,GAAG;AACf,YAAY,UAAU,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,gBAAgB,QAAQ,CAAC,gBAAgB,GAAG;AAC5C,YAAY,GAAG;AACf,YAAY,UAAU,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,QAAQ,CAAC,YAAY,EAAE;AACvC,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5B,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf,YAAY,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC;AACtD,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,GAAG,IAAI,CAAC,iBAAiB,EAAE;AAC3E,YAAY,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,mBAAmB,CAAC,SAAS,GAAG;AACnH;AACA,YAAY,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG;AAClG,YAAY,eAAe,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC;AACrC,YAAY,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE;AAC1C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE;AAC1C,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,IAAI,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,EAAE;AACnC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AAC9C,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;AAClD,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACvC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO;AACxC,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,QAAQ,CAAC,gBAAgB,GAAG;AAChD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,CAAC,QAAQ,EAAE;AAChC;AACA,YAAY,eAAe,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,eAAe,GAAG;AAC5E,YAAY,UAAU,CAAC,IAAI,GAAG,MAAM,GAAG;AACvC,YAAY,UAAU,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AAC/E;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AACnD,YAAY,YAAY,CAAC,YAAY,EAAE;AACvC;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,GAAG;AAC9F,oBAAoB,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG;AACxF,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE;AAC/C;AACA,YAAY,EAAE,OAAO,CAAC,MAAM,CAAC,MAAM;AACnC,YAAY,MAAM,CAAC,CAAC,CAAC,GAAG;AACxB,YAAY,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,gBAAgB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,oBAAoB,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE,CAAC,CAAC;AAChI,wBAAwB,MAAM,CAAC,IAAI,CAAC;AACpC,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE
,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI;AAC7H,gBAAgB,GAAG;AACnB,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC;AAC7B,oBAAoB,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE;AACnC,gBAAgB,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACjD;AACA,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC;AACpF,eAAe,QAAQ,CAAC,aAAa,CAAC,MAAM,EAAE;AAC9C,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;AACvD,YAAY,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChE,oBAAoB,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI;AAClF,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC;AACnH,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,MAAM,GAAG;AACvD,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,GAAG;AACrD,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E;AACA,wBAAwB,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM;AAC9F,wBAAwB,EAAE,CAAC,CAAC,uBAAuB,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzE;AACA,4BAA4B,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,KAAK,EAAE,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AAC9H,4BAA4B,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,QAAQ,EAAE;AACvD,4BAA4B,uBAAuB,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,EAAE;AAChE,wBAAwB,CAAC;AACzB,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC3F,4BAA4B,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,GAAG;AAC7D,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB;AACA,oBAAoB,EAAE,EAAE,CAAC,kBAAkB,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AACnH,oBAAoB,EAAE,CAAC,CAAC,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC1D,wBAAwB,EAAE,CAAC,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC;AAC7F,4BAA4B,iBAAiB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,MAAM,GAAG;AACzE,4BAA4B,qBAAqB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,MAAM,GAAG;AAC7E,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,iBAAiB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,IAAI,GAAG;AACvE,4BAA4B,qBAAqB,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,IAAI,GAAG;AAC3E,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;AACjE,gBAAgB,IAAI;AACpB,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,CAAC;AAC5C,gBAAgB,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,EAAE,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC;AAC7F,oBAAoB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,GAAG;AACxE,oBAAoB,EA
AE,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACvC,wBAAwB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,EAAE;AACpL,oBAAoB,CAAC;AACrB,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACnC,wBAAwB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,EAAE;AACrD,oBAAoB,CAAC;AACrB,oBAAoB,EAAE,CAAC,CAAC,OAAO,CAAC;AAChC,wBAAwB,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE;AAC9D,gBAAgB,GAAG;AACnB,gBAAgB;AAChB,gBAAgB,UAAU,CAAC,MAAM,GAAG,EAAE,CAAC,eAAe,EAAE;AACxD,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,EAAE;AAC7D;AACA,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,SAAS,EAAE,IAAI,EAAE;AACtC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;AACvC,gBAAgB;AAChB,gBAAgB,WAAW,GAAG;AAC9B,YAAY,CAAC;AACb,YAAY;AACZ,YAAY,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC;AAC/E,YAAY,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;AACnC,gBAAgB,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC;AACzE,gBAAgB,EAAE,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7C,oBAAoB,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC;AACxC,oBAAoB,YAAY,CAAC,IAAI,EAAE;AACvC,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,YAAY;AACZ,YAAY,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC;AAC/E,YAAY,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC;AAC9B,gBAAgB,OAAO,CAAC,CAAC,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,KAAK,GAAG,CAAC,GAAG;AAC9F,gBAAgB,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG;AACnF;AACA,gBAAgB,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO;AACrD,gBAAgB,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,wBAAwB,SAAS,CAAC,CAAC,CAAC,CAAC;AACrC,wBAAwB,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE;AAC3C,wBAAwB,MAAM,CAAC,CAAC,KAAK,CAAC;AACtC,wBAAwB,UAAU,CAAC,CAAC,KAAK;AACzC,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB;AACA,gBAAgB,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI;AACxD,gBAAgB,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,oBAAoB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjE,wBAAwB,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AAC/I,wBAAwB,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI;AACjI,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACpD,gBAAgB,GAAG;AACnB,gBAAgB;AAChB,gBA
AgB,WAAW,GAAG;AAC9B,gBAAgB;AAChB,gBAAgB,QAAQ,CAAC,KAAK,EAAE;AAChC,oBAAoB,OAAO,CAAC,CAAC,OAAO,CAAC;AACrC,oBAAoB,MAAM,CAAC,CAAC,MAAM;AAClC,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC;AACvE,gBAAgB,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,uBAAuB,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAClI,wBAAwB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AACxC,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,MAAM,GAAG,CAAC,EAAE;AACjE,wBAAwB,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5C,gBAAgB,GAAG;AACnB,gBAAgB,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,cAAc,EAAE;AAC1D,YAAY,CAAC,WAAW;AACxB,YAAY,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACpC,gBAAgB,UAAU,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,EAAE;AAC7C,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG;AAC9F,wBAAwB,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,MAAM,GAAG;AAChE,wBAAwB;AACxB,wBAAwB,EAAE,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM;AACtE,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC;AAC1D,wBAAwB,CAAC;AACzB,4BAA4B,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG;AACrD,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE,CAAC;AACvE,gCAAgC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,GAAG;AACvE,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE;AACvE,gCAAgC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,GAAG,IAAI;AAC7E,wBAAwB,CAAC;AACzB,4BAA4B;AAC5B,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,UAAU,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,IAAI;AACxH,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAgB,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE;AACxE,gBAAgB,UAAU,CAAC,IAAI,EAAE,UAAU,EAAE,CAAC,CAAC,MAAM,GAAG;AACxD,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gBAAgB,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG;AAClH,gBAAgB,UAAU,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AAC3C,gBAAgB,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG;AACxH,gBAAgB,iBAAiB,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC7F,gBAAgB,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI;AAChF,gBAAgB,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,GAA
G,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG;AACpH,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,gBAAgB,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;AAClD,gBAAgB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,EAAE;AACvC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK;AAC5F,gBAAgB,UAAU;AAC1B,kBAAkB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACnC,gBAAgB,UAAU;AAC1B,kBAAkB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC;AAClC,kBAAkB,CAAC,IAAI,EAAE,UAAU,EAAE,CAAC,IAAI,EAAE;AAC5C;AACA,gBAAgB,QAAQ,CAAC,eAAe,CAAC,iBAAiB,EAAE;AAC5D,YAAY,CAAC;AACb,YAAY,QAAQ,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpC,gBAAgB,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,WAAW,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,IAAI;AAC/F,QAAQ,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,UAAU,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,IAAI;AAChG,QAAQ,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChH,QAAQ,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7G,QAAQ,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9F,QAAQ,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3F,QAAQ,WAAW,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,KAAK,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI;AACtG,QAAQ,oBAAoB,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,oBAAoB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,oBAAoB,CAAC,CAAC,IAAI;AAC1H,QAAQ;AACR,QAAQ,EAAE,CAAC,UAAU,CAAC,OAAO;AAC7B,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpH,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,UAAU,EAAE,CAAC,CAAC,GAAG,CAAC,aAAa,CAAC,OAAO,GAAG;AACrE,YAAY,EAAE,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1E,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,C
AAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AACtE,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxH,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,cAAc,EAAE,CAAC,CAAC,GAAG,CAAC,aAAa,CAAC,OAAO,GAAG;AACzE,YAAY,cAAc,CAAC,CAAC,CAAC,GAAG;AAChC,YAAY,EAAE,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1E,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvE,YAAY,CAAC;AACb,CAAC;AACD,QAAQ,GAAG;AACX,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChI,YAAY,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,YAAY,EAAE,CAAC,UAAU,EAAE,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,aAAa,CAAC,OAAO,GAAG;AAC3E,YAAY,EAAE,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC7E,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1E,YAAY,CAAC;AACb;AACA,QAAQ,GAAG;AACX,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC9E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC/E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC7E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC1eF,EAAE,CAAC,MAAM,CAAC,wBAAwB,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AAClD,QAAQ,CAAC,GAAG,CAAC,MAAM,EAAE;AACrB,QAAQ,8DAA8D;AACtE,QAAQ,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AACjD,QAAQ,8DAA8D;AACtE;AACA,QAAQ,GAAG,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,mBAAmB,EAAE;AACjE,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACvC,QAAQ,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AAC1C,QAAQ,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACnD;AACA,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,QAAQ,
CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAChC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,eAAe,EAAE,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AAC1G,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,QAAQ,CAAC;AACT;AACA,KAAK,8DAA8D;AACnE;AACA,EAAE,8DAA8D;AAChE,QAAQ,EAAE,CAAC,OAAO,CAAC,SAAS;AAC5B,QAAQ,8DAA8D;AACtE;AACA,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACzD;AACA,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,YAAY,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC/B,gBAAgB,MAAM,CAAC,CAAC;AACxB,oBAAoB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE;AACxE,gBAAgB,EAAE;AAClB,YAAY,CAAC;AACb,QAAQ,EAAE;AACV;AACA,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,YAAY,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAChD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AAC3D,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,QAAQ,EAAE;AACV;AACA,QAAQ,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACjD,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,KAAK,EAAE,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,GAAG,EAAE,EAAE,EAAE,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,EAAE,GAAG,EAAE,GAAG,KAAK,GAAG;AAC7L,YAAY,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACxC,YAAY,CAAC;AACb,gBAAgB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,IAAI,IAAI,EAAE,GAAG,EAAE,GAAG;AACvE,gBAAgB,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,EAAE;AAChD,oBAAoB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,KAAK,EAAE,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,GAAG,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG;AAC5M,gBAAgB,GAAG;AACnB,gBAAgB,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK,GAAG;AACvC,YAAY,CAAC;AACb,YAAY,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK,GAAG;AACnC,YAAY,MAAM,CAAC,GAAG,CAAC;AACvB,QAAQ,GAAG;AACX;AACA,QAAQ,8DAA8D;AACtE,QAAQ,EAAE,CAAC,KAAK,CAAC,QAAQ;AACzB,QAAQ,8DAA8D;AACtE;AACA,QAAQ,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACnC,YAAY,WAAW,CAAC,KAAK,GAAG;AAChC,YAAY,WAAW,CAAC,MAAM,CAAC,mBAAmB,EAAE;AACpD;AACA,YAAY,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAChD,gBAAgB,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AAC5C;AACA,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,
IAAI,CAAC;AAChC;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACvF,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F;AACA,gBAAgB,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AACrE,gBAAgB,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACvC;AACA,gBAAgB,KAAK,CAAC,MAAM,CAAC,WAAW,CAAC,aAAa,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACtE,oBAAoB,CAAC,MAAM,CAAC,WAAW,CAAC,aAAa,EAAE;AACvD,oBAAoB,CAAC,MAAM,GAAG;AAC9B;AACA,gBAAgB,EAAE,GAAG,CAAC,KAAK,CAAC,QAAQ;AACpC,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AACzF;AACA,gBAAgB,EAAE,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM;AACpD,gBAAgB,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AACtG,gBAAgB,aAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,oBAAoB,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC5F,oBAAoB,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC;AACzF,gBAAgB,GAAG;AACnB;AACA,eAAe,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AACnC,oBAAoB,GAAG,CAAC,GAAG,CAAC;AAC5B,oBAAoB,YAAY,CAAC,CAAC,CAAC,GAAG;AACtC,oBAAoB,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACvC,wBAAwB,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACvD,4BAA4B,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AACpE,wBAAwB,IAAI;AAC5B,4BAA4B,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AAC3D,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACtE,gBAAgB,EAAE,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,oBAAoB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AACtD,oBAAoB,MAAM,CAAC,KAAK,CAAC;AACjC,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC/D,gBAAgB,CAAC;AACjB;AACA,gBAAgB,8DAA8D;AAC9E,gBAAgB,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACzD;AACA,gBAAgB,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,wBAAwB,GAAG,IAAI,EAAE,IAAI,GAAG;AACrG,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,wBAAwB,GAAG,MAAM,EAAE,CAAC,GAAG;AAC5H;AACA,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACzC;AACA,gBAAgB,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,uBAAuB,GAAG;AAC/E,gBAAgB,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAClE;AACA,gBAAgB,CAAC,CAAC,MAAM,EAAE,IAAI,EAAE;AAChC,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAClD,oBAAoB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE;AACjF;AACA,gBAAgB,EAAE,CAAC,MAAM;AACzB,gBAAgB,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACvE,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC;AAChD,wBAAwB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG;AAC5E;AACA,oBAAoB,CAAC,CAAC,M
AAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC9C,wBAAwB,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC,GAAG;AACvH,wBAAwB,CAAC,IAAI,CAAC,MAAM,EAAE;AACtC;AACA,oBAAoB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACrD,wBAAwB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC9F,oBAAoB,CAAC;AACrB,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,uBAAuB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK;AAClF,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC5F;AACA,gBAAgB,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC1C,gBAAgB,mBAAmB;AACnC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,aAAa,CAAC,aAAa,CAAC;AACjD,oBAAoB,CAAC,YAAY,CAAC,YAAY,EAAE;AAChD;AACA,UAAU,GAAG,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,uBAAuB,CAAC,EAAE;AAChF,kBAAkB,CAAC,KAAK,CAAC,IAAI,EAAE;AAC/B;AACA,UAAU,uBAAuB,CAAC,UAAU,GAAG,IAAI,CAAC,mBAAmB,EAAE;AACzE;AACA,IAAI,8DAA8D;AAClE,gBAAgB,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAChE,gBAAgB,8DAA8D;AAC9E,gBAAgB,EAAE,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM;AAC5C,UAAU,mBAAmB,CAAC,QAAQ,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACzF,cAAc,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AACnC,kBAAkB,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,kBAAkB,QAAQ,CAAC,QAAQ,CAAC,MAAM,EAAE;AAC5C,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC;AACtB;AACA,kBAAkB,YAAY,CAAC,CAAC,CAAC,KAAK,CAAC;AACvC,cAAc,CAAC;AACf,UAAU,GAAG;AACb;AACA,UAAU,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAChE,cAAc,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACxC,kBAAkB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC7C,cAAc,CAAC;AACf,cAAc,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC1C,cAAc,KAAK,CAAC,MAAM,GAAG;AAC7B,UAAU,GAAG;AACb;AACA,gBAAgB,EAAE,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM;AAC1E,UAAU,mBAAmB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3E,cAAc,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,GAAG;AACpG,cAAc,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC,cAAc,aAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,kBAAkB,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,kBAAkB,EAAE,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC/D,sBAAsB,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,cAAc,GAAG;AACjB,cAAc,QAAQ,CAAC,eAAe,CAAC,aAAa,CAAC,CAAC,QAAQ,EAAE;AAChE,UAAU,GAAG;AACb;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AAC/D,gBAAgB,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD;AACA,oBAAoB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AAC5D,wBAAwB,aAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpE,4BAA4B,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,wBAAwB,GAAG;AAC3B,wBAAwB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACpD,oBAAoB,CAAC;AACrB,oBAAoB,KAAK,CAAC,MA
AM,GAAG;AACnC,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,WAAW,CAAC,SAAS,EAAE,wBAAwB,CAAC,SAAS,GAAG;AACxE,YAAY,MAAM,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC;AACT;AACA,EAAE,8DAA8D;AAChE,QAAQ,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,QAAQ,8DAA8D;AACtE;AACA,QAAQ,mBAAmB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACpF,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AACtB,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AAC/B,gBAAgB,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AACjC,gBAAgB,MAAM,CAAC,CAAC,EAAE;AAC1B,aAAa,CAAC;AACd,YAAY,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE;AAC3B,gBAAgB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9D,oBAAoB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE;AAC7F,oBAAoB,EAAE,CAAC,GAAG,EAAE;AAC5B,wBAAwB,GAAG,CAAC,CAAC,CAAC;AAC9B,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC;AACvF,4BAA4B,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACzC,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,GAAG;AAC1D,wBAAwB,CAAC;AACzB,wBAAwB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG;AACzG,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG;AACrE,aAAa,CAAC;AACd,YAAY,OAAO,CAAC,IAAI,CAAC,EAAE,EAAE,MAAM,CAAC,KAAK,EAAE;AAC3C,QAAQ,GAAG;AACX;AACA,QAAQ,mBAAmB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAClF,YAAY,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AAChC,QAAQ,GAAG;AACX;AACA,QAAQ,mBAAmB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AACjF,YAAY,OAAO,GAAG;AACtB,QAAQ,GAAG;AACX,GAAG,8DAA8D;AACjE,QAAQ,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAClC,QAAQ,8DAA8D;AACtE;AACA,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AAClC,QAAQ,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAClC,QAAQ,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC;AACxD,QAAQ,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC9B,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAChC,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACzD;AACA,QAAQ,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AAC5C,YAAY,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAChE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5F,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/F,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3G,YAAY,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE
;AACjH,YAAY,aAAa,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACpH,YAAY,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACjH,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/F,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrG;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AAC7D,YAAY,MAAM,CAAC,CAAC,CAAC;AACrB,gBAAgB,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AACpD,gBAAgB,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC9C,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC3C,wBAAwB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC1C,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAClF,oBAAoB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtF,oBAAoB,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC9E,gBAAgB,CAAC;AACjB,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrE,oBAAoB,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjD,oBAAoB,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AACxC,oBAAoB,mBAAmB,CAAC,KAAK,CAAC,KAAK,EAAE;AACrD,gBAAgB,EAAE;AAClB,QAAQ,GAAG;AACX;AACA,QAAQ,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,mBAAmB,EAAE;AAC5D,QAAQ,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AACpC;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AC3SN,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC5B,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,EAAE;AACzC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,C
AAC,CAAC,GAAG,CAAC;AAC3B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK;AACpF,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK;AACvB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AACzB,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,KAAK;AACpC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AAC1B,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;AACzB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,SAAS,EAAE;AACzJ,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG;AAClB,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACtB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ;AACrB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK;AACnE,gBAAgB,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM;AACvE,gBAAgB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,gBAAgB,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;AACtC,gBAAgB,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;AACtC,gBAAgB,CAAC;AACjB;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AACvC,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC;AAChD,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC1D,oBAAoB,eAAe,CAAC,IAAI,CAAC,KAAK,EAAE;AAChD,oBAAoB,eAAe,CAAC,IAAI,CAAC,KAAK,EAAE;AAChD,gBAAgB,CAAC;AACjB,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,EAAE,CAAC,WAAW,EAAE;AAChC,oBAAoB,eAAe,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAChH,oBAAoB,eAAe,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAChH,oBAAoB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC1G,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,eAAe,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAChG,oBAAoB,eAAe,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AAChG,oBAAoB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IA
AI;AAC1F,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,EAAE;AACzE,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,GAAG,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE;AACxG,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,GAAG;AACnE,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AAC7D;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AACvH,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK;AAC7H;AACA,YAAY,EAAE;AACd,YAAY,SAAS,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gBAAgB,QAAQ,CAAC,UAAU,EAAE;AACrC,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC;AAC5B,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AAClC,oBAAoB,EAAE,CAAC,CAAC,EAAE;AAC1B,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,IAAI,CAAC,CAAC,CAAC,GAAG;AACtB,YAAY,QAAQ,CAAC,CAAC,CAAC,GAAG;AAC1B,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACtD;AACA,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,GAAG,WAAW,CAAC,eAAe,CAAC,CAAC,GAAG;AACvE,gBAAgB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,GAAG,WAAW,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/E;AACA,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AAC3C,oBAAoB,GAAG,CAAC,UAAU,CAAC,UAAU,EAAE;AAC/C,oBAAoB,OAAO,CAAC,UAAU,CAAC,UAAU,EAAE;AACnD,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,QAAQ,CAAC,QAAQ,EAAE;AAC3C,oBAAoB,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AAC/C,gBAAgB,CAAC;AACjB,gBAAgB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC5B,oBAAoB,GAAG,CAAC,WAAW,CAAC,eAAe,CAAC,CAAC,GAAG;AACxD,oBAAoB,OAAO,CAAC,WAAW,CAAC,eAAe,CAAC,CAAC,GAAG;AAC5D,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,YAAY,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC;AACvD,oBAAoB,GAAG,CAAC,YAAY,CAAC,YAAY,EAAE;AACnD,oBAAoB,OAAO,CAAC,YAAY,CAAC,YAAY,EAAE;AACvD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;AAC/B,gBAAgB,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE;AACvC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO;AAC9D,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,EAAE;AACrC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC;AAC3B,gBAAgB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG;AACxE;AACA,YAAY,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AACvC,YAAY,EAAE,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,C
AAC;AAC3C,gBAAgB,GAAG,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACpE,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACjC,gBAAgB,KAAK,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,GAAG;AACnE;AACA,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE;AAC5C,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AACnD,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,wBAAwB,MAAM,CAAC,KAAK,CAAC;AACrC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AACjI,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI;AAC5E,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,wBAAwB,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAClE,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE;AACjF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE;AAC1F;AACA,YAAY,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACnC,YAAY,SAAS,CAAC,IAAI,GAAG,MAAM,GAAG;AACtC;AACA,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG;AAChD,YAAY,EAAE,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AACzC,YAAY,EAAE,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AACvD,gBAAgB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAClC,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,UAAU,EAAE;AAC/D,wBAAwB,CAAC,QAAQ,CAAC,EAAE,CAAC;AACrC,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,GAAG;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,QAAQ,CAAC,gBAAgB,EAAE;AAC3C,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AACzD,oBAAoB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AACxE,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf,YAAY,EAAE,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AACxD,gBAAgB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAClC,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,GAAG,UAAU,EAAE;AAC/D,wBAAwB,CAAC,QAAQ,CAAC,EAAE,CAAC;AACrC,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG;AAC5C,gBAAgB,CAAC;AACjB,gBAAgB,QAAQ,CAAC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACnE,YAAY,GAAG;AACf,YAAY,EAAE,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,gBAAgB,QAAQ,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACpE,YAAY,GAAG;AACf,YAAY,EAAE,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,gBAAgB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC,gBAAgB,QAAQ,CAAC,YAAY,EAAE;AACvC,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AACz
D,oBAAoB,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACpC,oBAAoB,OAAO,CAAC,CAAC,OAAO;AACpC,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf,YAAY,EAAE,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,QAAQ,CAAC,eAAe,EAAE;AAC1C,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE;AACxD,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC5E,YAAY,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC9E;AACA,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,gBAAgB,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE;AACjC,gBAAgB,CAAC,UAAU,EAAE;AAC7B,gBAAgB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACnC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AAClE,gBAAgB,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,QAAQ,EAAE;AAC1C;AACA,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC7B,gBAAgB,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK;AAC7C,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG;AACnC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC1D,oBAAoB,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG;AAC5C;AACA,oBAAoB,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AACxC,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,4BAA4B,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,EAAE,WAAW,IAAI;AAC3F,4BAA4B,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,UAAU,EAAE;AAC1F,4BAA4B,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAAE;AACpF,wBAAwB,CAAC;AACzB,oBAAoB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;AACxC,4BAA4B,SAAS,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,EAAE;AACxD,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,SAAS,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,OAAO,EAAE,EAAE,CAAC,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3F,oBAAoB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAChD;AACA,oBAAoB,KAAK,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,wBAAwB,EAAE,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC;AACjD,4BAA4B,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAC5F,4BAA4B,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAC5F,4BAA4B,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE;AAChG,4BAA4B,EAAE,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5E,gCAAgC,WAAW,CAAC,EAAE,CAAC,EAAE,CAAC;AAClD,4BAA4B,CAAC,CAAC,IAAI,CAAC,CAAC;AACpC,gCAAgC,WAAW,CAAC,EAAE,CAAC,EAAE,CAAC;AAClD,4BAA4B,CAAC;AAC7B,4BAA4B,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AAC7G,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,
MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAChF,4BAA4B,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAChF,4BAA4B,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAChF,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE;AACxC,wBAAwB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE,GAAG,EAAE;AAChD,wBAAwB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,EAAE,GAAG,EAAE;AAC9C,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AACtC,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE;AACvC;AACA,oBAAoB,KAAK,CAAC,MAAM,EAAE,IAAI,EAAE;AACxC,wBAAwB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO;AAChN,wBAAwB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,EAAE,GAAG,EAAE;AAC9C,gBAAgB,GAAG;AACnB;AACA,gBAAgB,GAAG,CAAC,iBAAiB,CAAC,CAAC,CAAC,GAAG;AAC3C,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;AACnC,gBAAgB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC;AAC3D,oBAAoB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AACnI,gBAAgB,EAAE;AAClB,gBAAgB,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE;AACvE,gBAAgB,EAAE;AAClB;AACA,gBAAgB,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxG,oBAAoB,EAAE,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC;AAC7C,wBAAwB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AACxF,wBAAwB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AACxF,wBAAwB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE;AAC5F,wBAAwB,EAAE,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;AACxE,4BAA4B,WAAW,CAAC,EAAE,CAAC,EAAE,CAAC;AAC9C,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,WAAW,CAAC,EAAE,CAAC,EAAE,CAAC;AAC9C,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI;AACzG,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC;AAC5B,wBAAwB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAC5E,wBAAwB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,UAAU;AAC5E;AACA,wBAAwB,EAAE;AAC1B,wBAAwB,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;AAC9G,wBAAwB,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC;AAC5G,wBAAwB,MAAM,CAAC,GA
AG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AAC5E,wBAAwB,EAAE;AAC1B,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE;AAC9D,wBAAwB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,EAAE;AAC5D,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;AACnE,4BAA4B,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,EAAE;AAChE,4BAA4B,EAAE,CAAC,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,CAAC;AAC7D,gCAAgC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC;AACvD,4BAA4B,CAAC;AAC7B,4BAA4B,iBAAiB,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5E,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAC1D,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB;AACA,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE;AAClD,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,wBAAwB,EAAE,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO;AAC3F,wBAAwB,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AAC7H,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,wBAAwB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,EAAE;AAC5D,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACvC,wBAAwB,EAAE,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,GAAG;AAC5E;AACA,wBAAwB,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC7D,4BAA4B,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,gCAAgC,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;AACpD,gCAAgC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;AACtD,gCAAgC,CAAC,OAAO,EAAE,CAAC,WAAW,CAAC,OAAO,CAAC;AAC/D,4BAA4B,GAAG;AAC/B,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAChD,gCAAgC,IAAI,CAAC,CAAC,GAAG,EAAE;AAC3C,oCAAoC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;AACzD,oCAAoC,KAAK,CAAC;AAC1C,gCAAgC,IAAI,CAAC,CAAC,KAAK,EAAE;AAC7C,oCAAoC,KAAK,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,GAAG;AACtE,oCAAoC,KAAK,CAAC;AAC1C,gCAAgC,IAAI,CAAC,CAAC,OAAO,EAAE;AAC/C,oCAAoC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,OAAO,EAAE;AACpE,oCAAoC,KAAK,CAAC;AAC1C,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,wBAAwB,MAAM,CAAC,KAAK,CAAC;AACrC,oBAAoB,EAAE;AACtB,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA;AACA,YAAY,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC;AAChF,YAAY,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACxE,gBAAgB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACvC,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChE,gBAAgB,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AACtE,gBAAgB,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC
,EAAE;AACzD,gBAAgB,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,oBAAoB,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG;AAC3C,gBAAgB,EAAE;AAClB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,GAAG,CAAC,SAAS,GAAG;AAC/C,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,UAAU,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvG,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AACvG,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC/E,QAAQ,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC3E,QAAQ,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACvF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACvF,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACpG,QAAQ,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,IAAI;AACnH,QAAQ,KAAK,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzF,QAAQ,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACrG;AACA,QAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,
MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpF,YAAY,aAAa,CAAC,CAAC,CAAC;AAC5B,YAAY,EAAE,CAAC,UAAU,EAAE,gBAAgB,EAAE,CAAC,CAAC,GAAG,CAAC,aAAa,CAAC,OAAO,GAAG;AAC3E,QAAQ,GAAG;AACX,QAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACtF,YAAY,aAAa,CAAC,CAAC,CAAC;AAC5B,YAAY,EAAE,CAAC,UAAU,EAAE,kBAAkB,EAAE,CAAC,CAAC,GAAG,CAAC,aAAa,CAAC,OAAO,GAAG;AAC7E,QAAQ,GAAG;AACX,QAAQ,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/E,YAAY,WAAW,CAAC,CAAC,CAAC;AAC1B,YAAY,EAAE,CAAC,UAAU,EAAE,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC,OAAO,GAAG;AACnE,QAAQ,GAAG;AACX;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AACnF,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACrF,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtF,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AACpF,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC5D,YAAY,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE;AAC/B,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvC,QAAQ,GAAG;AACX,QAAQ,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClF,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE;AAClC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACnbF,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,GAAG;AAC9B,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG;AACpC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACtC;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,KAAK;AACpC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAChC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAA
C,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,GAAG,SAAS,EAAE;AAC1E,QAAQ,CAAC;AACT;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3C,QAAQ,GAAG;AACX;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE;AACpE,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AAC7C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ;AACrB,IAAI,8DAA8D;AAClE;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE;AAChC;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AAC9E,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACzD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,GAAG,CAAC,KAAK,CAAC,QAAQ;AAChC,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GA
AG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,GAAG,IAAI,EAAE,IAAI,GAAG;AACjF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,GAAG,MAAM,EAAE,CAAC,GAAG;AACxG,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,GAAG;AAC3D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,KAAK,CAAC,CAAC,cAAc,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI;AAChE;AACA,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC;AACpC,wBAAwB,CAAC,IAAI,CAAC,MAAM,EAAE;AACtC;AACA,oBAAoB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACvE,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACrD,wBAAwB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC9F,oBAAoB,CAAC;AACrB;AACA,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAChF,gBAAgB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,oBAAoB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,KAAK,GAAG;AACjE,oBAAoB,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC3D,wBAAwB,WAAW,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,MAAM,CAAC,eAAe,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,IAAI;AAChE,oBAAoB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC9C,oBAAoB,cAAc,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,GAAG;AACrD;AACA,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AACjD,wBAAwB,CAAC,KAAK,CAAC,IAAI,CAAC;AACpC,wBAAwB,CAAC,IAAI,CAAC,MAAM,CAAC;AACrC,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,GAAG,CAAC,IAAI;AACnF,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,GAAG,CAAC,KAAK,CAAC,cAAc,EAAE,MAAM,CAAC,eAAe,EAAE;AAC9D,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,OAAO,GAAG,KAAK,EAAE,IAAI,GAAG;AAChE,YAAY,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,EAAE;AAC7C;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC3C,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,CAAC;AACjB,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,S
AAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,SAAS,GAAG;AACpD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/D,QAAQ,GAAG,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACrC,YAAY,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,IAAI,EAAE;AACvC,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AAC7B,YAAY,OAAO,CAAC,CAAC,GAAG,CAAC,OAAO;AAChC,QAAQ,EAAE;AACV,QAAQ,EAAE,CAAC,EAAE,kBAAkB,CAAC,CAAC,CAAC;AAClC,YAAY,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AAC/B,YAAY,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC;AACtC,QAAQ,CAAC;AACT,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC9D,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC/D,QAAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;AACpB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS;AAC/E,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,gBAAgB,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACxG,QAAQ,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,eAAe,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzG,QAAQ,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,eAAe,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzG,QAAQ,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,GAAG,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,IAAI;AACrH,QAAQ,UAAU,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,WAAW,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7G,QAAQ,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,OAAO,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AACjH,QAAQ,YAAY,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,SAAS,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AAC/G;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,YAAY,GAAG,CAAC,KAAK,CAAC,KAAK,EAA
E;AAC7B,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,GAAG,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACnC,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,EAAE;AACV,IAAI,GAAG;AACP,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE;AACxC,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC1PF,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC/B,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,EAAE,CAAC,OAAO,CAAC;AACf,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC;AAC3C,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM;AAC7D;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACpB,QAAQ,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;AACvB,QAAQ,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACxB,QAAQ,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtB,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG;AACnB,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG;AACnB,QAAQ,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1B;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;AACvC,QAAQ,gBAAgB,GAAG;AAC3B,QAAQ,iBAAiB,GAAG;AAC5B,QAAQ,mBAAmB,GAAG;AAC9B,QAAQ,iBAAiB,CAAC,UAAU,EAAE;AACtC,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC/B,QAAQ,iBAAiB,GAAG;AAC5B,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,SAAS,CAAC;AACxF,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAC3B,QAAQ,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B;AACA,QAAQ,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B;AACA,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC;AAC9C,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAChC,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE;AAClD,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE;AACnC,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE;AACvC,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;AAC9C,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EA
AE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;AACrC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;AACtC,YAAY,MAAM,CAAC,QAAQ,CAAC;AAC5B,QAAQ,CAAC;AACT;AACA,QAAQ,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC;AACpD,YAAY,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,MAAM,CAAC,IAAI,CAAC;AACxB,QAAQ,EAAE;AACV;AACA,QAAQ,MAAM,CAAC,IAAI,CAAC;AACpB,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;AAC1C,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACjC,QAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAI,EAAE;AACN;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AAC9D,IAAI,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC;AAC/E,IAAI,QAAQ,CAAC,gBAAgB,EAAE,CAAC,CAAC;AACjC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC;AACnD,YAAY,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG;AAClC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC;AACnD,YAAY,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG;AAClC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AACrC,gBAAgB,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AACrC,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE;AACtF,YAAY,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE;AACtF,YAAY,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,EAAE;AAC1C,YAAY,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,EAAE;AAC1C,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC;AAC7E,IAAI,QAAQ,CAAC,iBAAiB,EAAE,CAAC,CAAC;AAClC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AAClC,gBAAgB,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE;AAChD,gBAAgB,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC;AAC/C,YAAY,EAAE;AACd,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,WAAW,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AACjE,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;AAC7E,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK;AACpE,IAAI,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC;AACrE,IAAI,QAAQ,CAAC,mBAAmB,EAAE,CAAC,CAAC;AACpC,QAAQ,EAAE;AACV,QAAQ,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC,YAAY,SAAS,CAAC;AACtB,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClB;AACA,QAAQ,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC;AACnC,QAAQ,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC;AACpG
,QAAQ,EAAE;AACV,QAAQ,KAAK,CAAC,CAAC,cAAc,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3D,YAAY,SAAS,CAAC,CAAC,CAAC,GAAG;AAC3B,YAAY,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACnD,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,gBAAgB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;AACpC,gBAAgB,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACzD,oBAAoB,EAAE,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,wBAAwB,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AACpD,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf,YAAY,cAAc,CAAC,CAAC,CAAC,SAAS,CAAC;AACvC,YAAY,EAAE,CAAC,CAAC;AAChB,YAAY,EAAE;AACd,QAAQ,CAAC;AACT;AACA,QAAQ,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;AAC1D,QAAQ,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACzB,YAAY,cAAc,CAAC,CAAC,EAAE;AAC9B,QAAQ,CAAC;AACT;AACA,QAAQ,iBAAiB,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAC3D,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,gBAAgB,EAAE,CAAC,CAAC;AACjC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1F,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAC,CAAC;AACpC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AACzB,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AACpD,IAAI,QAAQ,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC,CAAC;AAC5C,QAAQ,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC;AACjC,QAAQ,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACtC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7C,YAAY,CAAC,QAAQ,CAAC,EAAE,CAAC,SAAS,CAAC;AACnC,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC;AAC3B,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG;AACnD;AACA,QAAQ,EAAE;AACV,QAAQ,mBAAmB,GAAG;AAC9B,QAAQ,iBAAiB,GAAG;AAC5B,QAAQ,iBAAiB,GAAG;AAC5B,QAAQ,GAAG,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC3D,YAAY,gBAAgB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE;AAC3C,YAAY,iBAAiB,GAAG;AAChC,YAAY,iBAAiB,GAAG;AAChC,YAAY,gBAAgB,CAAC,KAAK,EAAE;AACpC,YAAY,iBAAiB,GAAG;AAChC,YAAY,iBAAiB,GAAG;AAChC,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,mBAAmB,EAAE,CAAC,CAAC;AACxC,YAAY,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC;AACjD,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7D,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE;AAC3F,YAAY,GAAG;AACf;AACA,YAAY,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACpD,gBAAgB,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,C
AAC,CAAC;AAC/B,oBAAoB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;AAC9C,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,gBAAgB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;AAC1C,YAAY,GAAG;AACf,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC1C,YAAY,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAC7D,gBAAgB,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9C,oBAAoB,EAAE,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC;AAClD,wBAAwB,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC;AAC/G,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE;AAC3G,wBAAwB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7D,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;AAC5E,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC1C,YAAY,cAAc,CAAC,KAAK,GAAG,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACtE,gBAAgB,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AAC9C,oBAAoB,EAAE,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC;AAClD,wBAAwB,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC;AACzG,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE;AAC3G,wBAAwB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7D,oBAAoB,CAAC;AACrB,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC;AAC3C,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC;AAC5E,YAAY,CAAC;AACb,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,iBAAiB,EAAE,CAAC,CAAC;AACtC,YAAY,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACpD,gBAAgB,GAAG,CAAC,IAAI,CAAC;AACzB,oBAAoB,EAAE,CAAC;AACvB,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,oBAAoB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;AACrC,oBAAoB,CAAC,CAAC;AACtB;AACA,gBAAgB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC;AACnD,gBAAgB,KAAK,CAAC,IAAI,CAAC,cAAc,EAAE;AAC3C,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AACpC,oBAAoB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACrC,oBAAoB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AAC7C,oBAAoB,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;AACxD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;AACnF,gBAAgB,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AAChD,gBAAgB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,
oBAAoB,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AACtC;AACA,oBAAoB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;AAC1D,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AACxC,wBAAwB,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;AACjE,wBAAwB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AACjD,wBAAwB,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACpC,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,YAAY,GAAG;AACf,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,QAAQ,CAAC;AACT,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACvF,IAAI,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACvD,IAAI,QAAQ,CAAC,iBAAiB,EAAE,CAAC,CAAC;AAClC,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,oBAAoB,EAAE;AACxD,YAAY,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,oBAAoB,EAAE;AACxD,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAY,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACrD,gBAAgB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7B,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;AAC9B,YAAY,GAAG;AACf,YAAY,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACrD,gBAAgB,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7B,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;AAC9B,YAAY,GAAG;AACf,QAAQ,GAAG;AACX;AACA,QAAQ,QAAQ,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,YAAY,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC;AACT;AACA,QAAQ,QAAQ,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,YAAY,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC;AACT,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC;AAC/B,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACvB,QAAQ,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACvB,IAAI,CAAC;AACL;AACA,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,EAAE;AACvD,IAAI,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACzC,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,GAAG,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI;AAC9F,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,OAAO,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzF,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,MAAM,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACzF,QAAQ,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,QAAQ,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AACxF,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC9F;AACA,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,IAAI,SAAS,GAAG,CAAC
,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,KAAK;AAC3F,QAAQ,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,QAAQ,KAAK,SAAS,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,KAAK;AACjF,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,KAAK,IAAI,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC9E,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG;AACxC,gBAAgB,MAAM,CAAC,CAAC,CAAC;AACzB,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,KAAK,MAAM,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC9E,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG;AACxC,gBAAgB,IAAI,CAAC,CAAC,CAAC;AACvB,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,IAAI,GAAG;AAC1B,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE;AACjC;AACA,IAAI,MAAM,CAAC,MAAM,CAAC;AAClB,EAAE;ACtUF,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACpC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,EAAE,CAAC,OAAO,CAAC;AACf,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC;AAC3C,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM;AAC7D;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE;AACxB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,EAAE,EAAE;AAC3B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS;AAC5B,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,GAAG,IAAI,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM;AACnE,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,QAAQ,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC7C,IAAI,EAAE;AACN,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,GAAG;AACtC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAChC,QAAQ,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,EAAE;AAC9E,IAAI,EAAE;AACN,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpC,QAAQ,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,KAAK;AAC1D,IAAI,EAAE;AACN,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACtC,QAAQ,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,EAAE;AACzC,IAAI,EAAE;AACN,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAChC,QAAQ,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,EAAE;AAC/C,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;AAChD,QAAQ,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE;AAC9B,YAAY,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AACrD,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,EAAE;AAC1C,YAAY,CAAC,IAAI,CAAC,OAAO,EAAE;AAC3B,IAAI,EAAE;AACN;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CA
AC,IAAI,CAAC,CAAC,CAAC;AACvC;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK,CAAC;AACtB,oBAAoB,CAAC;AACrB,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG;AACtD,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG;AACtD,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG;AACtD,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG;AACtD,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG;AACtD,wBAAwB,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;AACrD,oBAAoB,EAAE;AACtB,gBAAgB,KAAK,CAAC;AACtB,oBAAoB,CAAC;AACrB,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE;AAClE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE;AAClE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE;AAClE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE;AAClE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,EAAE;AACjE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,EAAE;AACjE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,EAAE;AACjE,wBAAwB,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC;AAChE,oBAAoB,CAAC;AACrB,YAAY,EAAE;AACd;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ;AAC7B,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AACpC,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC;AACtC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK;AACrC,YAAY,EAAE,CAAC;AACf,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;AACnF,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC;AAChF,YAAY,EAAE;AACd,gBAAgB,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS;AACzC,YAAY,EAAE,CAAC;AACf,gBAAgB,IAAI,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5D,gBAAgB,IAAI,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;AACzD,YAAY,CAAC,CAAC,CAAC;AACf,gBAAgB,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC;AACrC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK;AACzB,YAAY,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC;AAC9B,gBAAgB,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE;AAC3F,gBAAgB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,GAAG;AAC3F,gBAAgB,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,GAAG;AAC7E,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM;AACvC,YAAY,EAAE,EAAE,aAAa,CAAC,CAAC,CAAC;AAChC,gBAAgB,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,GAAG;AAC1D,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ;AAClC;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAChD,YAAY,GAAG,CAAC,GAA
G,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,GAAG,EAAE;AAC7C,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,KAAK,CAAC;AACrC,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,MAAM,CAAC;AACvC,gBAAgB,CAAC,MAAM,EAAE,CAAC,EAAE;AAC5B,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG;AAC9D;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU;AAChD,YAAY,MAAM;AAClB,gBAAgB,CAAC,SAAS,CAAC,SAAS,CAAC;AACrC,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC;AACzC,gBAAgB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,MAAM,GAAG;AACvC;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,GAAG;AACrC;AACA,YAAY,MAAM;AAClB,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;AAClC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;AAClC,gBAAgB,CAAC,MAAM,CAAC,EAAE,CAAC;AAC3B,gBAAgB,CAAC,MAAM,CAAC,MAAM,EAAE;AAChC;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AAC/B,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,EAAE;AACzD,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;AACjC,gBAAgB,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACvC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC;AAChC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE;AACjF,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG;AACzD;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM;AAClC,YAAY,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE;AAChC,gBAAgB,CAAC,IAAI,CAAC,SAAS,EAAE;AACjC;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK;AAC/B,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,EAAE;AACzD,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;AACjC,gBAAgB,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,EAAE;AACtC,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE;AAChG,gBAAgB,CAAC,IAAI,CAAC;AACtB,oBAAoB,EAAE,CAAC,QAAQ;AAC/B,wBAAwB,CAAC,IAAI,EAAE;AAC/B,wBAAwB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AAC1D,wBAAwB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrD,4BAA4B,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,EAAE;AAC9D,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC;AAC7C,gBAAgB,EAAE;AAClB;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK;AAC/C,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7D,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,MAAM,CAAC,SAAS,GAAG;AAClD,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,aAAa,CAAC;AAC7C,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,eAAe,CAAC;AACjD,gBAAgB,CAAC,MAAM,EAAE,KAAK,EAAE;AAChC,gBAAgB,CAAC,IAAI,CAAC,SAAS,EAAE;AACjC;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK;AAC7C,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5D,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE;AACpC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,EAAE;AAC3C,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,CAAC;AACxC,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CA
AC,EAAE;AACrD,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,GAAG;AAClD,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,GAAG;AAC9C;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK;AAChD,YAAY,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,EAAE,SAAS,EAAE;AACjD,gBAAgB,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;AAC1E,gBAAgB,CAAC,CAAC,CAAC,CAAC,KAAK;AACzB,gBAAgB,MAAM,CAAC,QAAQ,GAAG;AAClC,gBAAgB,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE;AACrC,YAAY,CAAC;AACb,QAAQ,GAAG;AACX;AACA,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,OAAO,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC5F,QAAQ,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,OAAO,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC5F,QAAQ,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAC7F,QAAQ,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,GAAG,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAChG,QAAQ,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,GAAG,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAChG,QAAQ,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAClG,QAAQ,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,GAAG;AAC5F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,aAAa,CAAC;AAC1F,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,
CAAC,CAAC,eAAe,CAAC;AAC5F,YAAY,SAAS,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,OAAO,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,SAAS,CAAC;AACtF,QAAQ,EAAE;AACV;AACA,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC1OF;AACA,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAC7D,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK;AACjE,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI;AACjC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AAClH,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC1C,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC1C,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI;AAChG,QAAQ,CAAC,CAAC,IAAI,SAAS,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAClF,QAAQ,CAAC,CAAC,IAAI,SAAS,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAClF,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI;AAC5F,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AAClG,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE;AACtG,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxE,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AAC3E,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,YAAY;AACjG,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG;AACjH,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK;AACvI,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK
,CAAC,OAAO;AACjF,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AAC5E,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,WAAW;AACzH,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK;AAC3D,QAAQ,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK;AACvG,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC;AACpF,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM;AAClD,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AACjD,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;AACjD,QAAQ,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM;AAC3D,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,KAAK;AAC9B,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,SAAS,EAAE;AAC3H,QAAQ,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,QAAQ,KAAK,CAAC,CAAC,GAAG;AAC5B,QAAQ,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,GAAG;AACtC,QAAQ,CAAC,CAAC,UAAU,IAAI,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC;AACT;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM;AACnD,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,SAAS;AACnB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,QAAQ;AACzI,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC;AAChE,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC;AACpC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC;AACT;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;AACrB,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvC,QAAQ,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,GAAG;AAC9C,QAAQ,MAAM,CAAC,GAAG,CAAC;AACnB,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;AACrB,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvC,QAAQ,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE;AAC3B,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;AACxB,YAAY,KAAK,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAChC,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1B,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,YAAY,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE;AAC/B,YAAY,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG;AAC/C,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CA
AC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,CAAC,CAAC;AACnE,gBAAgB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACjC,gBAAgB,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;AAC7B,YAAY,CAAC;AACb,QAAQ,CAAC;AACT,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS;AAC/D,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,gBAAgB,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACvD,oBAAoB,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC;AACtF,YAAY,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO;AACpF,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,WAAW;AACxN,gBAAgB,EAAE,CAAC,KAAK,CAAC;AACzB,oBAAoB,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,wBAAwB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3D,4BAA4B,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrF,wBAAwB,EAAE;AAC1B,oBAAoB,EAAE;AACtB,gBAAgB,EAAE;AAClB;AACA,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG;AACzG;AACA,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE;AACnC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,GAAG;AAC1M,YAAY,EAAE,CAAC,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG;AAC9I,YAAY,IAAI;AAChB,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG;AACvD;AACA,aAAa,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AAC5B,oBAAoB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AACjG,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC;AACjC,wB
AAwB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjF,4BAA4B,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,4BAA4B,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,wBAAwB,GAAG,MAAM,CAAC,MAAM,GAAG;AAC3C,wBAAwB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AAC/D,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,wBAAwB,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,MAAM,GAAG;AAClH,wBAAwB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AAC/D,gBAAgB,CAAC;AACjB;AACA,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,SAAS,GAAG;AACjH,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,cAAc,EAAE;AACpD;AACA,YAAY,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK;AAC3H,YAAY,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAC7F;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;AAChD,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1G,oBAAoB,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACvC;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC;AACxC,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC;AACxC,gBAAgB,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,YAAY,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB;AACA,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;AAC/E;AACA,YAAY,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC;AACrC,YAAY,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC;AACxC;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC;AAClE;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,GAA
G,IAAI,EAAE,IAAI,GAAG;AAChF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE;AAC7G,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,WAAW,EAAE;AACzD,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG;AAC1D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG;AAC/D,YAAY,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG;AAClE;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACjD,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE;AAC/B,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI;AAC3D,gBAAgB;AAChB,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACxD,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC;AACnD,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAClF;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACjF;AACA,YAAY,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;AAC/C,gBAAgB,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG;AAC9E,gBAAgB,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE;AAC9E,gBAAgB,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AACpC;AACA,gBAAgB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC/C;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO;AAC3E,gBAAgB,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,oBAAoB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAClF,4BAA4B,MAAM,CAAC,KAAK,CAAC,MAAM;AAC/C,gCAAgC,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAClE,oCAAoC,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ;AACrF,oCAAoC,EAAE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS;AACtF,oCAAoC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC;AACrH,qCAAqC,EAAE;AACvC,oCAAoC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;AACpE,oCAAoC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;AACpE;AACA,oCAAoC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5F,4CAA4C,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5F,wCAAwC,UAAU,CAAC;AACnD,wCAAwC,UAAU,CAAC,CAAC,KAAK,EAAE,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAA
K,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,UAAU;AAC7I,gCAAgC,EAAE;AAClC,gCAAgC,CAAC,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC1E,oCAAoC,MAAM,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,OAAO,CAAC;AACtJ,gCAAgC,EAAE;AAClC,wBAAwB,EAAE;AAC1B,oBAAoB,EAAE;AACtB;AACA,oBAAoB,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI;AAC5F,oBAAoB,EAAE,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,wBAAwB,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI;AACtH,wBAAwB,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG;AAC1F,wBAAwB,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG;AAC1F,wBAAwB,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG;AAC1F,wBAAwB,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG;AAC1F,oBAAoB,CAAC;AACrB;AACA,oBAAoB,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK;AACrF,oBAAoB,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG;AACzE,oBAAoB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE;AAClD,wBAAwB,EAAE,EAAE,EAAE,EAAE,EAAE;AAClC,wBAAwB,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;AAC1C,wBAAwB,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;AACjD,wBAAwB,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC;AACxC,oBAAoB,GAAG;AACvB;AACA,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChF,wBAAwB,MAAM,CAAC,CAAC;AAChC,4BAA4B,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE;AACnD,4BAA4B,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE;AACrD,4BAA4B,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC;AACnD,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB;AACA,oBAAoB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI;AACzE,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG,SAAS,EAAE,IAAI,GAAG,MAAM,GAAG;AAC9E,oBAAoB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG,SAAS,EAAE,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;AACpG,oBAAoB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU;AAChD,wBAAwB,CAAC,KAAK,GAAG,MAAM,EAAE,GAAG,CAAC,IAAI,EAAE;AACnD,wBAAwB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,4BAA4B,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACrE,gCAAgC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,4BAA4B,IAAI;AAChC,gCAAgC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACpE,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,4BAA4B,MA
AM,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,EAAE;AACnD,wBAAwB,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,GAAG,EAAE,KAAK,CAAC,KAAK,CAAC,EAAE;AAClG,wBAAwB,CAAC;AACzB;AACA,oBAAoB,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM;AAC5D,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,WAAW,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE;AACxE,4BAA4B,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,4BAA4B,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AACvD,4BAA4B,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG;AAClE,oBAAoB,CAAC;AACrB;AACA,oBAAoB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AACtC,wBAAwB,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC;AACpE,wBAAwB,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM;AAC3E,wBAAwB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG,SAAS,MAAM,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG;AACjH,wBAAwB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG,SAAS,EAAE,QAAQ,GAAG,IAAI,CAAC,QAAQ,EAAE;AAC7G,wBAAwB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU;AACpD,4BAA4B,CAAC,KAAK,GAAG,MAAM,EAAE,GAAG,CAAC,QAAQ,EAAE;AAC3D,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG,EAAE,KAAK,CAAC,GAAG;AACtF,4BAA4B,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;AACjD,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACrE,4BAA4B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AACrE,4BAA4B,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,UAAU,EAAE;AACnD,oBAAoB,CAAC;AACrB;AACA,oBAAoB,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AACzE,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAClD,wBAAwB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACpD,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;AACzD,wBAAwB,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,EAAE;AAC5D,wBAAwB,KAAK,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;AACjE;AACA,wBAAwB,EAAE,CAAC,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;AAC9D,wBAAwB,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE;AACjD,wBAAwB,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE;AACjD;AACA,wBAAwB,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO;AACjG,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,GAAG,qBAAqB,GAAG;AAC3E,wBAAwB,GAAG,CAAC,SAAS,EAAE,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,eAAe,CAAC,SAAS,CAAC;AAClG,wBAAwB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,eAAe,CAAC,UAAU,CAAC;AACnG;AACA,wBAAwB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnC,4BAA4B,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;AACrG,4BAA4B,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;AAChG,wBAAwB,EAAE;AAC1B;AACA,wBAAwB,SAAS,EAAE;AACnC,4BAA4B,KAAK,CAAC,CAAC,KAAK,
CAAC;AACzC,4BAA4B,MAAM,CAAC,CAAC,MAAM,CAAC;AAC3C,4BAA4B,GAAG,CAAC,CAAC,GAAG,CAAC;AACrC,4BAA4B,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AACvH,4BAA4B,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AAClD,4BAA4B,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAChD,4BAA4B,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AAC5C,4BAA4B,OAAO,CAAC,CAAC,EAAE;AACvC,wBAAwB,GAAG;AAC3B,oBAAoB,EAAE;AACtB;AACA,oBAAoB,UAAU;AAC9B,wBAAwB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,4BAA4B,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,YAAY,EAAE;AAC/E,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,4BAA4B,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,EAAE;AAClF,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,4BAA4B,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,gBAAgB,EAAE;AACnF,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,4BAA4B,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,eAAe,EAAE;AAClF,wBAAwB,GAAG;AAC3B;AACA,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK;AACzE,oBAAoB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACpE,wBAAwB,CAAC,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AAC/C,wBAAwB,GAAG,IAAI,CAAC,cAAc,CAAC;AAC/C,wBAAwB,GAAG,KAAK,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG;AAChG,wBAAwB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,4BAA4B,EAAE,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACnD,4BAA4B,EAAE,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC1G,4BAA4B,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACxD,gCAAgC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1D,4BAA4B,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AAC/C,4BAA4B,QAAQ,CAAC,YAAY,EAAE;AACnD,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC;AAC7C,gCAAgC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC/C,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAC9I,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/G,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACtD,gCAAgC,UAAU,CAAC,CAAC,CAAC,CAAC;AAC9C,gCAAgC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AAChD,gCAAgC,OAAO,CAAC,CAAC,OAAO;AAChD,4BAA4B,GAAG;AAC/B,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,4BAA4B,EAAE,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC1G,4BAA4B,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACxD,gCAAgC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1D;AACA,4BAA4B,QAAQ,CAAC,eAAe,EAAE;AACtD,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC;AAC7C,gCAAgC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC/C,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,C
AAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAC7I,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/G,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACtD,gCAAgC,UAAU,CAAC,CAAC,CAAC;AAC7C,4BAA4B,GAAG;AAC/B,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD,4BAA4B,EAAE,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC1G,4BAA4B,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACxD,gCAAgC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1D;AACA,4BAA4B,QAAQ,CAAC,gBAAgB,EAAE;AACvD,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC;AAC7C,gCAAgC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC/C,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAC7I,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/G,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACtD,gCAAgC,UAAU,CAAC,CAAC,CAAC,CAAC;AAC9C,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,4BAA4B,GAAG;AAC/B,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,4BAA4B,EAAE,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK;AAC1G,4BAA4B,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACxD,gCAAgC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1D;AACA,4BAA4B,QAAQ,CAAC,eAAe,EAAE;AACtD,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC;AAC7C,gCAAgC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC/C,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AAC7I,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AAC/G,gCAAgC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACtD,gCAAgC,UAAU,CAAC,CAAC,CAAC,CAAC;AAC9C,gCAAgC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,4BAA4B,GAAG;AAC/B,wBAAwB,GAAG;AAC3B,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC;AAC/B,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,GAAG,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG;AAC9E,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,EAAE;AACtC,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC9C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE;AAC7C,YAAY,MAAM,CAAC,IAAI,EAAE;AACzB,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,KA
AK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,OAAO,EAAE,EAAE,CAAC,cAAc,EAAE,CAAC,CAAC,WAAW,CAAC;AAC3D,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG;AAClE,YAAY,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE;AAClE,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACpE,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,gBAAgB,CAAC,EAAE,CAAC,gBAAgB,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAChH,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC3C,gBAAgB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE;AAC3C;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG;AAClF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE;AAC1D,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AACxC,wBAAwB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AACtD,4BAA4B,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC;AACtD,wBAAwB,GAAG,MAAM,CAAC;AAClC,4BAA4B,QAAQ,CAAC,UAAU,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC9D,gCAAgC,MAAM,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC;AAC7E,4BAA4B,EAAE;AAC9B,oBAAoB,GAAG;AACvB,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,oBAAoB,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACvD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AAC/D,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AACjE,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG;AACvI,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE;AAC1B,oBAAoB,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AACrC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACjE,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvE,YAAY,EAAE;AACd,YAAY,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG;AAClD,YAAY,MAAM,CAAC,IAAI,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE;AACpD,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE;AAC7D,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG;AACrI,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,MAAM,GAAG;AAC1B,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO
;AACrE,YAAY,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,EAAE;AAC9G,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE;AAC/D,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK;AACrE,oBAAoB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG;AACrI,gBAAgB,GAAG;AACnB,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO;AAC9E,YAAY,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,EAAE;AAC5H,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE;AAC/D,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE;AAC1B,oBAAoB,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AACrC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACjE,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACvE,YAAY,EAAE;AACd;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK;AACjD,YAAY,EAAE,CAAC,UAAU,CAAC;AAC1B,YAAY,CAAC;AACb,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE;AAC3D,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,wBAAwB,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAC1D,gCAAgC,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,UAAU,CAAC;AAC1D,4BAA4B,GAAG,MAAM,CAAC;AACtC,gCAAgC,QAAQ,CAAC,UAAU,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC;AAClE,oCAAoC,MAAM,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC;AACjF,gCAAgC,EAAE;AAClC,wBAAwB,GAAG;AAC3B;AACA,gBAAgB,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,EAAE;AAC7C,oBAAoB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;AACzC,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxH,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;AACvG,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AACxC,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI;AAC7C;AACA,gBAAgB,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACvC,gBAAgB,MAAM,CAAC,IAAI,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE;AACxD,oBAAoB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE;AACjE,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,E
AAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACpH,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI;AACtG,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,MAAM,GAAG;AAC9B,eAAe,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,kBAAkB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;AACjC,oBAAoB,CAAC,OAAO,EAAE,EAAE,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC;AAC9C,oBAAoB,CAAC,OAAO,EAAE,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AACvD,oBAAoB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE;AAC5C,gBAAgB,GAAG;AACnB,gBAAgB,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE;AACrE,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;AACpH,wBAAwB,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG;AACrG,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,SAAS;AACpF,YAAY,EAAE,CAAC,CAAC,sBAAsB,CAAC,CAAC;AACxC,YAAY,CAAC;AACb,gBAAgB,YAAY,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,sBAAsB;AACvF,gBAAgB,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,sBAAsB,CAAC,CAAC,sBAAsB,CAAC,EAAE;AACxF,YAAY,CAAC;AACb,YAAY,IAAI;AAChB,YAAY,CAAC;AACb,gBAAgB,sBAAsB,GAAG;AACzC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B;AACA,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3B,YAAY,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC;AAC7B;AACA,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,SAAS,GAAG;AACnD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK;AACpD,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC;AACnC,QAAQ,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;AAC5C,YAAY,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,KAAK,CAAC,KAAK,GAAG,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC/E,YAAY,GAAG;AACf,YAAY,MAAM,CAAC,IAAI,CAAC;AACxB,QAAQ,EAAE;AACV,QAAQ,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC/E,YAAY,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE;AAC9C,kBAAkB,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;AACzD,kBAAkB,CAAC,SAAS,GAAG,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,UAAU,CAAC;AACvD,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,WAAW,EAAE;AACjD,YAAY,GAAG;AACf,QAAQ,EAAE;AACV,IAAI,EAAE;AACN;AACA,IAAI,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG;AACpC,IAAI,QAAQ,CAAC,EAAE,EAAE,gBA
AgB,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,EAAE;AACtF,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtD,QAAQ,EAAE,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE;AACvF,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACnF,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC3E,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC3E,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC3E,QAAQ,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACvF,QAAQ,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACvF,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACrF,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACrF,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F,QAAQ,YAAY,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACjG,QAAQ,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AACvF,QAAQ,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACzF,QAAQ,WAAW,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAC/F,QAAQ,UAAU,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC7F,QAAQ,WAAW,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC
,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAChG,QAAQ,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC7E,QAAQ,sBAAsB,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,CAAC,sBAAsB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,sBAAsB,CAAC,CAAC,IAAI;AAC9H,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;AAC9F,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,IAAI;AAC7G;AACA,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO;AACjC,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AACxF,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AACxF,QAAQ,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AAClG,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AACrG;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC3E,YAAY,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC;AACvC,gBAAgB,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AACpC,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACloBF;AACA,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,OAAO,MAAM,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AAC1C,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACzC,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AAC3C,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,YAAY,EAAE;AACjD,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CA
AC,MAAM,CAAC,YAAY,EAAE;AACjD,QAAQ,CAAC,CAAC,OAAO,MAAM,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AAC5C,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AACjE,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE;AACzC,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE;AACzC,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,KAAK;AAC9B,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,KAAK;AAC9B,QAAQ,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,SAAS,EAAE;AAC3E,QAAQ,CAAC,CAAC,MAAM,OAAO,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,UAAU,IAAI,CAAC,CAAC,KAAK;AAC/B,QAAQ,CAAC;AACT;AACA,IAAI,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,EAAE;AAChC,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,EAAE,EAAE;AAC3C,IAAI,KAAK;AACT,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACrD,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;AACxB,IAAI,CAAC;AACL,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC,GAAG;AACpB,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC,GAAG;AACpB,IAAI,OAAO;AACX,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE;AACd,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AACjE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE;AACpE,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,OAAO,EAAE;AACpC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE
;AACxC;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnC,oBAAoB,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE;AAC1C,gBAAgB,IAAI;AACpB,oBAAoB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AAC1E,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,gBAAgB;AACpD,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AACjE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,SAAS,GAAG;AAC3D,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,YAAY,GAAG,IAAI,EAAE,IAAI,GAAG;AACrF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,IAAI;AAC5H,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC,MAAM;AAC5C,YAAY,MAAM,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,GAAG,KAAK,EAAE,OAAO,CAAC,MAAM,GAAG,IAAI,GAAG;AACrG;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,mBAAmB,GAAG;AACvE,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,
MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,cAAc,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC1C;AACA,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC7C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE;AAClC;AACA,gBAAgB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACjD,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC1F,gBAAgB,CAAC;AACjB;AACA,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AACjF,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,OAAO;AACnB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,GAAG;AACtE,gBAAgB,CAAC,UAAU,CAAC,UAAU,EAAE;AACxC;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,EAAE;AAC1C,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AACvE,gBAAgB,CAAC,IAAI,CAAC,OAAO,EAAE;AAC/B;AACA;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,mBAAmB,EAAE;AAClD,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK;AAC9E;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,mBAAmB,GAAG,SAAS,GAAG,EAAE,CAAC,QAAQ,EAAE;AAC1F,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,GAAG;AACnB;AACA,YAAY,OAAO,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AACrE;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC,OAAO,EAAE;AAC1D,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,GAAG;AACnB;AACA,YAAY,OAAO,CAAC,KAAK,EAAE;AAC3B,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE;AAC3D,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE;AAC5C;AACA,YAAY,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG;AACxE,YAAY,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,gBAAgB,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC;AAC9C,YAAY,EAAE;AACd,gBAAgB,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,OAAO,EAAE;AAC9E,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;AACzC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CA
AC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AACnE,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AACnE,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oBAAoB,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvH,gBAAgB,GAAG;AACnB;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,EAAE;AACrD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AAC3E,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE;AACnD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,YAAY;AACjC,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG;AACzC,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,oBAAoB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,IAAI;AAC3E,gBAAgB,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,GAAG,MAAM,EAAE,CAAC,EAAE;AACzD,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,aAAa,GAAG;AACvD,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,aAAa,EAAE;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AAC3E,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG;AACzC,oBAAoB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7B,oBAAoB,CAAC,KAAK,CAAC,eAAe,CAAC;AAC3C,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtD,oBAAoB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,IAAI;AAC3E,gBAAgB,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,GAAG,MAAM,EAAE,CAAC,EAAE;AACzD,oBAAoB,CAA
C,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,aAAa,GAAG;AACvD,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,aAAa,EAAE;AAC7C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAClH,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACxD,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK;AAC1G,YAAY,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1E,gBAAgB,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AACrC,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AACjI,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE;AACnC,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AACjI,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,IAAI;AAC9C,YAAY,GAAG;AACf;AACA,YAAY,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3E,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AAClG,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,EAAE;AACtE,gBAAgB,SAAS,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC;AAClG,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,IAAI;AACnE,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AAChD,YAAY,GAAG;AACf;AACA,YAAY,EAAE,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,MAAM;AAC/D,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC1B;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,GAAG;AAC7D,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC
,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACzF,QAAQ,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACzF,QAAQ,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACzF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACzF,QAAQ,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AACzF,QAAQ,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACrG,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACvF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AA
ClD,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,KAAK,EAAE;AAChC,YAAY,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE;AAC/B,YAAY,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE;AAC/B,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC5C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACxYF;AACA,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAClC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;AACrB,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI;AACxB,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,IAAI,GAAG,GAAG;AAC7C,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,OAAO;AACjB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,MAAM;AAChB,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI;AACjC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI;AACjC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE;AAC7C,QAAQ,CAAC;AACT;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD,IAAI;AACJ,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE;AAC1D,gBAAgB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,GAAG;AACtD;AACA,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE;AAC1D,gBAAgB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,GAAG;AACvD;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,GAAG,IAAI,EAAE,IAAI,GAAG;AAClF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,SAAS,GAAG;AAChG,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;A
ACvF;AACA,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE;AAC9C,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AAClD,YAAY,KAAK,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG;AACzC,YAAY,KAAK,CAAC,IAAI,GAAG,MAAM,GAAG;AAClC,YAAY,KAAK;AACjB,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACjF,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACxC,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC7D,oBAAoB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC7D,YAAY,EAAE;AACd;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;AAClF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE;AAC1D,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,oBAAoB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACjF,oBAAoB,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;AAChD,wBAAwB,EAAE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,4BAA4B,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD,4BAA4B,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC;AACtD,4BAA4B,MAAM,CAAC,MAAM,CAAC;AAC1C,wBAAwB,CAAC,CAAC,IAAI,CAAC,CAAC;AAChC,4BAA4B,MAAM,CAAC,IAAI,CAAC;AACxC,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oBAAoB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI;AAClF,wBAAwB,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI;AAC9E,wBAAwB,YAAY,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AACtE,oBAAoB,MAAM,CAAC,EAAE,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI;AAC3L,gBAAgB,GAAG;AACnB,YAAY,MAAM,CAAC,KAAK,GAAG,MAAM,EAAE,MAAM,GAAG;AAC5C,YAAY,MAAM,CAAC,IAAI,GAAG,MAAM,GAAG;AACnC,YAAY,MAAM;AAClB,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,EAAE;AAC7E,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,EAAE;AAC7E,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7B,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,oBAAoB,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,CAAC,CAAC;AAChG,4BAA4B,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC;AACpH,gBAAgB,GAAG;AACnB,QAAQ,GAAG;AACX,QAAQ;AACR,QAAQ,WAAW,CAAC,SAAS,EAAE,SAAS,CAAC,SAAS,GAAG;AACrD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,I
AAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACvF,QAAQ,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzF,QAAQ,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAC3F,QAAQ,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAC3F,QAAQ,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzF,QAAQ,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzF,QAAQ,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC/E,QAAQ,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;AAC/E,QAAQ,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAC3F,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,IAAI;AAC7G,QAAQ,gBAAgB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC,IAAI;AAC7G;AACA,QAAQ,EAAE,OAAO,CAAC,OAAO;AACzB,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AAClF,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AAClF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC7IF;AACA,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,GAAG;AAC1C;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,K
AAK,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC5D,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,CAAC;AACX,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AACpB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;AACxB,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE;AACvC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,IAAI,EAAE,EAAE;AACzC,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI;AAC9B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE;AAC7C,QAAQ,CAAC;AACT,QAAQ;AACR,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,SAAS,EAAE;AACtC,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;AACjF;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,GAAG;AACnC,YAAY,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,GAAG;AACnC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,aAAa,GAAG,IAAI,EAAE,IAAI,GAAG;AACtF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,aAAa,GAAG;AACpG,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,aAAa,GAAG;AACjE,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AAC7D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AAC7D;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF;AACA,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,YAAY,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,aAAa,GAAG;AAC9D;AACA,YAAY,SAAS,CAAC,KAAK,CAAC,cAAc,EAAE,MAAM,CAAC,eAAe,EAAE;AACpE,YAAY,aAAa,CAAC,IAAI,CAAC,SAAS,EAAE;AAC1C;AACA,YAAY,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,
CAAC;AAChC,gBAAgB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,GAAG;AAC1D,gBAAgB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,YAAY,EAAE;AACnE,oBAAoB,CAAC,IAAI,EAAE,YAAY,GAAG;AAC1C;AACA,gBAAgB,KAAK,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,EAAE;AAC7E,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzD,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,EAAE;AACvC,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG;AAC7E;AACA,gBAAgB,KAAK;AACrB,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACrF,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1D,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACnC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,oBAAoB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,SAAS,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7F,oBAAoB,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,GAAG;AACrD,YAAY,CAAC;AACb;AACA,YAAY,MAAM,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,GAAG,MAAM,EAAE,IAAI,EAAE;AACzD,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,cAAc,CAAC;AAChD,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;AAC7D,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,eAAe,GAAG,CAAC,GAAG;AAC/E;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AAChH,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC3E,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AAC9D;AACA,YAAY,EAAE,KAAK,CAAC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG;AAC3F,YAAY,QAAQ,CAAC,eAAe,EAAE,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;AACnC;AACA,gBAAgB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,GAAG,EAAE,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,EAAE;AAC3E;AACA,gBAAgB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE;AACnD,oBAAoB,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,EAAE;AAC/D,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE;AAC9C;AACA,gBAAgB,UAAU,CAAC,IAAI,EAAE;AACjC,oBAAoB,CAAC,UAAU,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,MAAM,GAAG;AAC9B;AACA,gBAAgB,UAAU;AAC1B,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE;AACjH,oBAAoB,CAAC,UAAU,GAAG,QAAQ,CAAC,GAAG,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE;AAC9C;AACA,gBAAgB,EAAE,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC1C;AACA,gBAAgB,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,oBAAoB,CAA
C,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,gBAAgB,UAAU,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE;AACpE,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AAClC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3C,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,EAAE;AAC/C,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,GAAG;AACxC;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK;AAChF;AACA,gBAAgB,UAAU,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE;AACpE,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AACjC,oBAAoB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3C,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE;AACjD,oBAAoB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,GAAG,GAAG;AACxC;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,MAAM,EAAE;AACrD,oBAAoB,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK;AAChF,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,cAAc,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;AACnC;AACA,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC1D;AACA,gBAAgB,QAAQ,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,oBAAoB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3E,oBAAoB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AAC1D,wBAAwB,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC;AACjF,4BAA4B,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/E,4BAA4B,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,oBAAoB,MAAM,CAAC,YAAY,CAAC;AACxC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,KAAK;AAC3E,gBAAgB,eAAe,GAAG;AAClC,YAAY,CAAC;AACb;AACA,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,SAAS,GAAG;AACzD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;AAChC;AACA,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACtF,QAAQ,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACxF,QAAQ,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAClG,QAAQ,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAA
C,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AAClG,QAAQ,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACtG,QAAQ,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAChG,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,IAAI;AAC1G,QAAQ,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACxF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAC9C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;ACxNF;AACA,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACpC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK;AAC/E,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG;AACvG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK;AAC5F,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK;AAC5F,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO;AACjJ,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACzB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACzB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AAC3B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa;AACpE,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,C
AAC,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACvE,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,YAAY,GAAG,SAAS,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE;AAClJ,QAAQ,CAAC;AACT;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI;AACvC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO;AAChE,IAAI,CAAC;AACL;AACA,IAAI,qCAAqC;AACzC,KAAK,CAAC,CAAC,MAAM,CAAC;AACd,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC;AACzB,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,EAAE;AACrC,KAAK,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AACvC,KAAK,CAAC;AACN,KAAK,CAAC,CAAC,KAAK,CAAC;AACb,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;AAC9B,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC;AAChC,KAAK,qCAAqC;AAC1C;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE;AAC/D;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,OAAO,EAAE;AACpC,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACpE,gBAAgB,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AACtE;AACA,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;AAC/B,YAAY,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK;AACtG,YAAY,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,gBAAgB,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,gBAAgB,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpE,oBAAoB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,oBAAoB,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7D,gBAAgB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AACxC,YAAY,GAAG;AACf;AACA,YAAY,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,EAAE;AACpC,gBAAgB,CAAC,KAAK,CAAC,KAAK,CAAC;AAC7B,gBAAgB,CAAC,MAAM,CAAC,MAAM,CAAC;AAC/B,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO;AAC1H,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;AACxB,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;AACxB,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,wBAAwB,EAAE,CAAC,CAAC,EAAE;AAC9B,oBAAoB,
EAAE;AACtB,gBAAgB,EAAE;AAClB,YAAY,CAAC,YAAY,EAAE;AAC3B;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG,IAAI,EAAE,IAAI,GAAG;AACpF,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,WAAW,GAAG;AAClG,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,GAAG;AACrD,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG;AAC/C,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,GAAG;AAC5D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AAC/D;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACxF,YAAY;AACZ,YAAY,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AAC1F,YAAY,EAAE,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM;AAC9D,YAAY,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,gBAAgB,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,EAAE;AACzC,YAAY,CAAC;AACb,YAAY;AACZ,YAAY,OAAO;AACnB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;AACxB,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,CAAC;AACvF,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE;AACjE,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC,gBAAgB,IAAI;AACpB;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,EAAE;AACzD,gBAAgB,CAAC,KAAK,CAAC,IAAI,EAAE;AAC7B;AACA,YAAY,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE;AACtC;AACA,YAAY,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAE;AACxC,gBAAgB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACjD,gBAAgB,CAAC,MAAM,EAAE,IAAI,GAAG;AAChC;AACA,YAAY,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE;AACxD,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;AACjF;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACpC,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC;AACjC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1D,gBAAgB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC;AAC1C,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAoB,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC;AACxD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,WAAW,CAAC,WAAW,EAAE;AAC1C;AACA,YAAY,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE;AACxC,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAA
C;AACjC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC1D,gBAAgB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE;AAC3D,gBAAgB,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,GAAG;AAC5D;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,QAAQ,GAAG,SAAS,EAAE,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;AACzE,gBAAgB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AAChD;AACA,YAAY,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AACtG,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AACzC,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE;AAC7D,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAC3D,oBAAoB,QAAQ,CAAC,aAAa,EAAE;AAC5C,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACtC,wBAAwB,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAC9D,wBAAwB,WAAW,CAAC,CAAC,CAAC,CAAC,WAAW;AAClD,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACtC,wBAAwB,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAC9D,wBAAwB,WAAW,CAAC,CAAC,CAAC,CAAC,WAAW;AAClD,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AAC5D,oBAAoB,QAAQ,CAAC,SAAS,EAAE;AACxC,wBAAwB,KAAK,CAAC,CAAC,CAAC,CAAC;AACjC,wBAAwB,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACtC,wBAAwB,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAC9D,wBAAwB,WAAW,CAAC,CAAC,CAAC,CAAC,WAAW;AAClD,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,IAAI,CAAC,IAAI,GAAG,MAAM,GAAG;AACjC,YAAY,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC7C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;AAC7D,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,GAAG;AAC7F,YAAY,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,WAAW,CAAC,IAAI,EAAE;AAChE,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1C,oBAAoB,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3C,gBAAgB,GAAG;AACnB;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtE,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE;AAClG,YAAY,GAAG;AACf,YAAY,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACrE,gBAAgB,CAAC,CAAC,MAAM,G
AAG,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE;AACnG,YAAY,GAAG;AACf;AACA,YAAY,EAAE,OAAO,CAAC,MAAM,CAAC,SAAS;AACtC,YAAY,KAAK,CAAC,6BAA6B,CAAC,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;AACvE,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,MAAM;AAC9D,oBAAoB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM;AAC7E,oBAAoB,CAAC,CAAC;AACtB,oBAAoB,CAAC,CAAC;AACtB,oBAAoB,CAAC,CAAC;AACtB,oBAAoB,EAAE,CAAC,CAAC,CAAC,GAAG;AAC5B;AACA,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM;AACtE,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM;AAC9F,wBAAwB,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC;AAClH,oBAAoB,CAAC;AACrB;AACA,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC5G,wBAAwB,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAChD,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACpF,wBAAwB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACjD,4BAA4B,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,gBAAgB,MAAM,CAAC,EAAE,CAAC;AAC1B,YAAY,EAAE;AACd;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,SAAS,GAAG;AACvD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO;AACjC,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B;AACA,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,YAAY,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AACrG,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AAC7G,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,CAAC,QAAQ,GAAG,CAAC,QAAQ,CAAC,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,CAAC,GAAG;AAC3G;AACA,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC;AAClD,QAAQ,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AACxB,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAClC,QAAQ,EAAE,CAAC,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAC/C,QAAQ,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,QAAQ,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACpC,QAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACnC,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,EAAE;AACN;AACA,IAAI,KAA
K,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI;AAClF,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACrF,QAAQ,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACpF,QAAQ,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AAC/E,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG;AACA,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO;AACjC,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AACxF,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,KAAK;AACxF;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAC5B,gBAAgB,IAAI,CAAC,CAAC,KAAK,EAAE;AAC7B,oBAAoB,KAAK,CAAC,MAAM,EAAE,IAAI,GAAG;AACzC,oBAAoB,KAAK,CAAC,KAAK,EAAE,OAAO,GAAG;AAC3C,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,MAAM,EAAE;AAC9B,oBAAoB,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG;AAC3C,oBAAoB,KAAK,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,GAAG;AAC9C,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,oBAAoB,KAAK,CAAC,MAAM,EAAE,UAAU,GAAG;AAC/C,oBAAoB,KAAK,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,GAAG;AAC9C,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,MAAM,EAAE;AAC9B,oBAAoB,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG;AAC3C,oBAAoB,KAAK,CAAC,KAAK,EAAE,OAAO,GAAG;AAC3C,oBAAoB,KAAK,CAAC;AAC1B,gBAAgB,IAAI,CAAC,CAAC,aAAa,EAAE;AACrC,o
BAAoB,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,6BAA6B,EAAE;AACtE,oBAAoB,KAAK,CAAC,KAAK,EAAE,OAAO,GAAG;AAC3C,oBAAoB,KAAK,CAAC;AAC1B,YAAY,CAAC;AACb,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC5C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AC1UF;AACA,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACzC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,EAAE;AACzC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAClC,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACrC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE;AACvC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,oBAAoB,EAAE;AACtD,QAAQ,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE;AACvC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,GAAG;AAC1D,QAAQ,CAAC;AACT;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI;AAC3B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAChC,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,KAAK;AACjC,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK;AAC7B,QAAQ,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,KAAK;AACzC,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI;AACnC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC9B,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,EAAE;AAChD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,GAAG,SAAS,EAAE;AAC1E,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,GAAG;AAC5B,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,QAAQ,EAAE;AAC1D,QAAQ,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;AAC5B,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC;AACT;AACA,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG;AAClC,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC,CAAC,EAAE;AAC1C,IAAI,KAAK,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AACvD;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,GAAG;AACX;AACA,IAAI,gBAAgB,CAAC,OAAO;AAC5B,QAAQ,CAAC,
eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,YAAY,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,YAAY,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE;AAChE,QAAQ,GAAG;AACX;AACA,IAAI,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,QAAQ,iBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC;AACjC;AACA,IAAI,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG;AAChC;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,GAAG;AAC1B,YAAY,MAAM,CAAC,CAAC;AACpB,gBAAgB,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG;AACrE,gBAAgB,KAAK,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE;AACtC,YAAY,EAAE;AACd,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC,QAAQ,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AAChC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC;AAC1C,gBAAgB,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC;AACpC,YAAY,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC;AAC3C,gBAAgB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,oBAAoB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACvD,gBAAgB,GAAG;AACnB,QAAQ,CAAC;AACT,IAAI,EAAE;AACN;AACA,IAAI,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM;AAC1C;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,OAAO,EAAE;AACpC,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD,QAAQ,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,EAAE;AACjD;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C,gBAAgB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;AAC5B,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,gBAAgB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3H;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE;AACjG,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnC;AACA,YAAY,KAAK;AACjB,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC;AACxD,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;AAC1C,gBAAgB,CAAC,MAAM,GAAG;AAC1B;AACA,YAAY,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ;AAC5C,YAAY,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC3E;AACA,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,gBAAgB,GAAG,CAAC,GAAG,CAAC;AACxB,gBAAgB,YAAY,CAAC,CAAC,CAAC,GAAG;AAClC,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AACpC,oBAAoB,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC;AACpD,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,EAAE;AAChE,oBAAoB,IAAI;AACxB,wBAAwB,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE;AACvD,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,
OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb,YAAY,EAAE,CAAC,KAAK,CAAC,MAAM;AAC3B,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,gBAAgB,GAAG,IAAI,EAAE,IAAI,GAAG;AACzF,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,gBAAgB,GAAG,MAAM,EAAE,CAAC,GAAG;AAChH,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG;AACrC;AACA,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG;AAC9D,YAAY,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,YAAY,GAAG;AAChE;AACA,YAAY,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,GAAG;AAC1E,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,MAAM,EAAE,IAAI,GAAG;AACjF,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,GAAG;AACjE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AACnE,YAAY,UAAU,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG;AACnE;AACA,YAAY,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE,IAAI,GAAG,IAAI,EAAE,KAAK,EAAE,cAAc,EAAE,IAAI,EAAE,MAAM,EAAE,eAAe,EAAE;AAC5F;AACA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,GAAG;AAChF;AACA,YAAY,EAAE,CAAC,MAAM;AACrB,YAAY,EAAE,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,SAAS,MAAM,MAAM,GAAG;AACnE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,EAAE,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,cAAc,CAAC;AAC9H;AACA,gBAAgB,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE;AAC1C,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE;AACpE;AACA,gBAAgB,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AAClD,iBAAiB,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM;AAC7E,iBAAiB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7D,oBAAoB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE;AAC3F,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/H,iBAAiB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,WAAW,CAAC;AAC/D,oBAAoB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC9C,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM;AAC5E,gBAAgB,CAAC,CAAC,I
AAI,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;AACtD,oBAAoB,EAAE,CAAC,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;AACtE,wBAAwB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,GAAG;AACrD,wBAAwB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACnI,oBAAoB,CAAC;AACrB;AACA,oBAAoB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,UAAU,EAAE;AAC9C,qBAAqB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAChH,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,QAAQ;AACvB,YAAY,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC,CAAC;AAChC,iBAAiB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,GAAG,SAAS,MAAM,MAAM,GAAG;AACtE,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,oBAAoB,CAAC;AACrB,wBAAwB,GAAG,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,OAAO,EAAE;AAChE,wBAAwB,OAAO,CAAC,CAAC,CAAC,OAAO,EAAE;AAC3C,wBAAwB,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE;AAC7D,wBAAwB,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACtC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC;AACrB,wBAAwB,GAAG,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE;AAC9D,wBAAwB,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE;AAC1C,wBAAwB,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE;AAC9D,wBAAwB,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC;AACvC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC;AACrB,wBAAwB,GAAG,CAAC,CAAC,aAAa,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,EAAE;AAClE,wBAAwB,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE;AAC5C,wBAAwB,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE;AAC9D,wBAAwB,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC;AACvC,oBAAoB,EAAE;AACtB,oBAAoB,CAAC;AACrB,wBAAwB,GAAG,CAAC,CAAC,aAAa,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG;AACtE,wBAAwB,OAAO,CAAC,CAAC,CAAC,aAAa,EAAE;AACjD,wBAAwB,QAAQ,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,aAAa,EAAE;AACrE,wBAAwB,KAAK,CAAC,CAAC,CAAC,aAAa,CAAC;AAC9C,oBAAoB,CAAC;AACrB,gBAAgB,EAAE;AAClB;AACA,gBAAgB,YAAY,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC/D,gBAAgB,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAChE,oBAAoB,MAAM,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACpE,gBAAgB,GAAG;AACnB;AACA,gBAAgB,QAAQ;AACxB,oBAAoB,CAAC,KAAK,CAAC,CAAC,YAAY,CAAC,CAAC;AAC1C,oBAAoB,CAAC,KAAK,IAAI,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,IAAI;AACrD;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,EAAE;AAC5C,oBAAoB,CAAC,KAAK,CAAC,YAAY,CAAC;AACxC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE;AACpC;AACA,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9H;AACA,gBAAgB,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AAClD,oBAAoB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC;AAC7C,oBAAoB,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/H,gBAAgB,CAAC;AACjB;AACA,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,YAAY,EAAE;AAC5C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM;AAC5E,YAAY,CAAC;AACb;AACA,YAAY,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK;AACx
F;AACA,YAAY,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;AAClC,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI;AAC9E,YAAY,CAAC;AACb;AACA,YAAY,EAAE,GAAG,CAAC,EAAE,CAAC,WAAW,CAAC,KAAK;AACtC,YAAY,EAAE,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC1C,gBAAgB,gBAAgB;AAChC,oBAAoB,CAAC,KAAK,CAAC,cAAc,CAAC;AAC1C,oBAAoB,CAAC,MAAM,CAAC,eAAe,CAAC;AAC5C,oBAAoB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;AACjE,oBAAoB,CAAC,YAAY,CAAC,SAAS,CAAC;AAC5C,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE;AAC/B,gBAAgB,IAAI,CAAC,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACtE,YAAY,CAAC;AACb;AACA,YAAY,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE;AACrD,gBAAgB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,cAAc,CAAC;AAC9C,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,CAAC,eAAe,EAAE;AACjD;AACA,YAAY,OAAO;AACnB,gBAAgB,CAAC,KAAK,CAAC,cAAc,CAAC;AACtC,gBAAgB,CAAC,MAAM,CAAC,eAAe,CAAC;AACxC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,oBAAoB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAClD,gBAAgB,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACxE;AACA,YAAY,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE;AACnE,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI;AACzE;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI;AACzB,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AAC5E,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;AACpD,YAAY,CAAC;AACb;AACA,YAAY,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;AAC5B,gBAAgB,GAAG,CAAC,KAAK,CAAC;AAC1B,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AACpD,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9B,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,CAAC;AACtB,oBAAoB,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC,IAAI,EAAE;AAC1E,gBAAgB,CAAC;AACjB,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9B,oBAAoB,CAAC,MAAM,CAAC,KAAK,CAAC;AAClC,oBAAoB,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,EAAE;AAClD,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,MAAM,CAAC,IAAI;AAC1B,YAAY,8DAA8D;AAC1E,YAAY,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACpC,gBAAgB,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,oBAAoB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACvD,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,IAAI;AAClF,wBAAwB,CAAC,UAAU,EAAE;AACrC,wBAAwB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AAC3C,wBAAwB,CAAC,IAAI,CAAC,KAAK,CAAC;AACpC,wBAAwB,CAAC;AACzB,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAC;AACpC,gBAAgB,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,oBAAoB,EAAE,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC;AAC9F,wBAAwB,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,GAAG;AAC/D;AACA,wBAAwB,EAAE,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,EAAE,CAAC,aAAa,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;AACpF,4BAA4B,cAAc,CAAC,CAAC,CAAC,aAAa,CAAC;AAC3D;AACA,wBAAwB,EAAE,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC;AAC9E,wBAAwB,KAAK,CAAC,UAAU,CAAC,gBAAgB,EAAE;AAC3D,oBAAoB,CAAC;AACrB,oB
AAoB,IAAI,CAAC,CAAC;AAC1B,wBAAwB,EAAE,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC;AAC7C,4BAA4B,KAAK,CAAC,UAAU,CAAC,cAAc,EAAE;AAC7D,4BAA4B,cAAc,CAAC,CAAC,CAAC,IAAI,CAAC;AAClD,wBAAwB,CAAC;AACzB,oBAAoB,CAAC;AACrB;AACA,oBAAoB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AACvD,oBAAoB,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC7C,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK;AAC3B,YAAY,8DAA8D;AAC1E,YAAY,EAAE,EAAE,WAAW,CAAC,CAAC,CAAC;AAC9B,gBAAgB,WAAW,CAAC,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE;AACvD,gBAAgB,WAAW,GAAG;AAC9B,gBAAgB,WAAW,GAAG;AAC9B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,KAAK,CAAC,KAAK,CAAC,cAAc,EAAE;AAC5C,gBAAgB,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,SAAS,EAAE;AACzC,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI;AACtH,oBAAoB,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,GAAG;AAC5E,oBAAoB,CAAC,IAAI,CAAC,KAAK,EAAE;AACjC,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG;AAC1F,gBAAgB,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE;AACpC,oBAAoB,OAAO,CAAC,MAAM,EAAE;AACpC,gBAAgB,CAAC;AACjB,YAAY,CAAC;AACb;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5D,YAAY,8DAA8D;AAC1E;AACA,YAAY,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACjE,gBAAgB,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACjF,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC9C,wBAAwB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3C,oBAAoB,GAAG;AACvB,gBAAgB,IAAI;AACpB,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChD,wBAAwB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE;AAC1D,oBAAoB,GAAG;AACvB;AACA,gBAAgB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG;AAC/E,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,MAAM,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClE,gBAAgB,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,QAAQ,CAAC;AACzC,oBAAoB,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE;AAC/C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC;AACxC;AACA,gBAAgB,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7D,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAC7B,gBAAgB,GAAG;AACnB,gBAAgB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AACnC;AACA,gBAAgB,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE;AACvC;AACA;AACA,gBAAgB,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,GAAG;AAC9C,gBAAgB,QAAQ,CAAC,WAAW,CAAC,KAAK,EAAE;AAC5C;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1E,gBAAgB,OAAO,CAAC,eAAe,GAAG;AAC1C,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,UAAU,CAAC,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CA
AC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9G,gBAAgB,IAAI;AACpB,oBAAoB,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC;AAChD,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,wBAAwB,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,iBAAiB,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACnG,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE;AAC9D,wBAAwB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,UAAU,EAAE;AACvE,wBAAwB,EAAE,CAAC,CAAC,WAAW,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AAClD,4BAA4B,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE;AACxE,wBAAwB,CAAC;AACzB,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,MAAM,CAAC;AACjE,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC;AACpF,wBAAwB,EAAE,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,GAAG;AAChI;AACA,wBAAwB,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC;AACzG,wBAAwB,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE;AACzH,wBAAwB,OAAO,CAAC,IAAI,EAAE;AACtC,4BAA4B,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC5C,4BAA4B,KAAK,CAAC,CAAC,YAAY,CAAC;AAChD,4BAA4B,KAAK,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;AACpE,4BAA4B,KAAK,CAAC,CAAC,KAAK;AACxC,wBAAwB,GAAG;AAC3B;AACA,wBAAwB,EAAE,CAAC,CAAC,kBAAkB,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxG,0BAA0B,QAAQ,CAAC,EAAE,CAAC,YAAY,CAAC;AACnD,0BAA0B,aAAa,CAAC,CAAC,CAAC,KAAK,CAAC;AAChD,wBAAwB,EAAE;AAC1B,oBAAoB,GAAG;AACvB;AACA,gBAAgB,OAAO,CAAC,OAAO,GAAG;AAClC;AACA,gBAAgB,EAAE,SAAS,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC;AAC3F,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,oBAAoB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,EAAE;AACjE,oBAAoB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;AACrE,oBAAoB,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACxD;AACA,wBAAwB,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM;AACtH,wBAAwB,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC;AACtF,wBAAwB,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AAClD,wBAAwB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,EAAE;AAC1E,wBAAwB,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,EAAE;AACxE,wBAAwB,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,EAAE;AACrF,wBAAwB,CAAC;AACzB,4BAA4B,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,4BAA4B,MAAM,CAAC;AACnC,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB,oBAAoB,EAAE,CAAC,CAAC,gBAAgB,CAAC,EAAE,CAAC,IAAI,CAAC;AACjD,wBAAwB,OAAO,CAAC,gBAAgB,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;AACnE,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,EAA
E,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC;AACnF,gBAAgB,EAAE,CAAC,CAAC,kBAAkB,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC;AACjH,oBAAoB,OAAO,CAAC,IAAI,EAAE;AAClC,wBAAwB,GAAG,CAAC,CAAC,UAAU,CAAC;AACxC,wBAAwB,KAAK,CAAC,CAAC,QAAQ,CAAC;AACxC,wBAAwB,KAAK,CAAC,CAAC,IAAI;AACnC,oBAAoB,GAAG;AACvB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,WAAW,CAAC,UAAU,EAAE;AAC/D;AACA,gBAAgB,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,gBAAgB,CAAC,OAAO,CAAC,cAAc,GAAG;AAC/E,gBAAgB,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI;AAClG,gBAAgB,EAAE,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC;AAC1F,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC;AAC/C,wBAAwB,iBAAiB,CAAC,CAAC,CAAC,cAAc,CAAC;AAC3D,oBAAoB,CAAC;AACrB,oBAAoB,EAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC;AAC5E,oBAAoB,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,GAAG,CAAC,IAAI;AACtD,gBAAgB,CAAC;AACjB,gBAAgB,IAAI,CAAC,CAAC;AACtB,oBAAoB,EAAE,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC;AAC5C,wBAAwB,cAAc,CAAC,CAAC,CAAC,iBAAiB,CAAC;AAC3D,wBAAwB,iBAAiB,CAAC,CAAC,CAAC,IAAI,CAAC;AACjD,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB;AACA,gBAAgB,gBAAgB,CAAC,OAAO;AACxC,oBAAoB,CAAC,cAAc,CAAC,cAAc,CAAC;AACnD,oBAAoB,CAAC,IAAI,CAAC;AAC1B,oBAAoB,CAAC;AACrB,wBAAwB,KAAK,CAAC,CAAC,MAAM,CAAC;AACtC,wBAAwB,MAAM,CAAC,CAAC,OAAO;AACvC,oBAAoB,CAAC;AACrB,gBAAgB,IAAI;AACpB;AACA,gBAAgB,gBAAgB,CAAC,eAAe,CAAC,cAAc,EAAE;AACjE;AACA,YAAY,GAAG;AACf;AACA,YAAY,gBAAgB,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACxE,gBAAgB,OAAO,CAAC,eAAe,GAAG;AAC1C,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE;AACtD,YAAY,KAAK,CAAC,QAAQ,CAAC,EAAE,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3D,gBAAgB,OAAO,CAAC,MAAM,EAAE;AAChC,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO;AACvE,YAAY,QAAQ,CAAC,EAAE,EAAE,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7F,oBAAoB,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAwB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACxD,oBAAoB,GAAG;AACvB;AACA,oBAAoB,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AAChD,gBAAgB,CAAC;AACjB;AACA,gBAAgB,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC;AACrD,oBAAoB,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE;AAC3C,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACpC,gBAAgB,CAAC;AACjB;AACA,gBAAgB,KAAK,CAAC,MAAM,GAAG;AAC/B,YAAY,GAAG;AACf;AACA,YAAY,8DAA8D;AAC1E,YAAY,EAAE,CAAC,SAAS;AACxB,YAAY,8DAA8D;AAC1E;AACA,YAAY,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC;AACtC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC;AACtC,gBAAgB,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE;AACvE,oBAAoB,CAAC,KAAK,CAAC;AAC3B,oBAAoB,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACpE,wBAAwB,CAAC,GAAG,CAAC,
QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5C,4BAA4B,MAAM,CAAC,CAAC;AACpC,gCAAgC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAC3C,gCAAgC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AAC7C,gCAAgC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;AACnD,gCAAgC,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvE,oCAAoC,MAAM,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE;AAC1G,gCAAgC,GAAG;AACnC,gCAAgC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc;AAChE,4BAA4B,EAAE;AAC9B,wBAAwB,EAAE;AAC1B,gBAAgB,EAAE;AAClB,gBAAgB,WAAW,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE;AAC1E;AACA,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AAC3C,gBAAgB,WAAW,GAAG;AAC9B,gBAAgB,WAAW,GAAG;AAC9B,YAAY,CAAC;AACb;AACA,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG;AAC9D,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,QAAQ,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AAChD,QAAQ,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,GAAG,GAAG,CAAC,KAAK,EAAE;AAChD,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AAClE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5B,IAAI,GAAG;AACP,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;AAC1B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC;AAC/B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC;AAC/B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,gBAAgB,CAAC;AAC9C,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAC3F,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AACvG,QAAQ,SAAS,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC7F,QAAQ,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,IAAI;AAC3F,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC
,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAClF,QAAQ,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AACpG,QAAQ,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,IAAI;AACvG,QAAQ,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,IAAI;AAC1G,QAAQ,kBAAkB,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,IAAI;AACxH,QAAQ,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,IAAI;AAChG,QAAQ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,IAAI;AACjG,QAAQ,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,KAAK;AACvG,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,WAAW,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,WAAW,CAAC,CAAC,KAAK;AAC7G;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC;AACtC,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AACnC,gBAAgB,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;AAClC,YAAY,CAAC;AACb,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,QAAQ,GAAG;AACX,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAC7E,YAAY,KAAK,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC;AACvF,YAAY,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC;AACzF,YAAY,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC;AAC1F,YAAY,KAAK,CAAC,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC;AACxF,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACvC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,YAAY,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClE,YAAY,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACzC,YAAY,MAAM,CAA
C,KAAK,CAAC,KAAK,EAAE;AAChC,YAAY,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE;AACjC,YAAY,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE;AAC/B,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;AACzB,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;AACvB,QAAQ,GAAG;AACX,QAAQ,eAAe,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrF,YAAY,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG;AAC9D,QAAQ,GAAG;AACX,QAAQ,uBAAuB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrG,YAAY,uBAAuB,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAC1C,YAAY,KAAK,CAAC,WAAW,EAAE,CAAC,EAAE;AAClC,YAAY,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE;AACjC,YAAY,OAAO,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,EAAE;AAC5C,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE;AAC5C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC;AACA,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AACF;AACA,EAAE,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AAClD,EAAE,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,gBAAgB,EAAE;AACrC,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,EAAE;AAC3B,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,EAAE;AACzB,EAAE;AChqBF,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,WAAW,CAAC,oBAAoB;AAC/D,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACjC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACvD,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG;AACrB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG;AACtB,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AACxB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE;AACrJ,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG;AACxG,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;AAC1B,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK;AAC5B,QAAQ,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,GAAG;AACxI,QAAQ,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/B,QAAQ,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE;AAC5D,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAA
C,CAAC,IAAI,EAAE;AAC7C,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI;AACnC,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,gBAAgB,EAAE,CAAC,CAAC,eAAe,EAAE,CAAC,CAAC,SAAS,GAAG;AAC1J;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,GAAG;AACtD,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,GAAG;AAC5B;AACA,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,EAAE;AACrD;AACA,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC,cAAc,CAAC,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC;AACtD,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG;AAC3B;AACA,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,EAAE;AAC1B,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;AACrF,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,EAAE;AAC1F,QAAQ,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;AAC/D,QAAQ,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG;AACvE;AACA,IAAI,QAAQ,CAAC,yBAAyB,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3C,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,EAAE;AAChD,QAAQ,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,EAAE;AAC7B,YAAY,MAAM,CAAC,GAAG,CAAC;AACvB,QAAQ,CAAC;AACT,QAAQ,IAAI,CAAC,CAAC;AACd,YAAY,MAAM,CAAC,CAAC,CAAC;AACrB,QAAQ,CAAC;AACT,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AACpE,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI;AACzE,QAAQ,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AACjF,QAAQ,MAAM,CAAC,WAAW,CAAC;AAC3B,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AACpE,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI;AACzE,QAAQ,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE;AACvD,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC;AACvC,QAAQ,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI;AACpE,QAAQ,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,
CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI;AACzE;AACA,QAAQ,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC;AACzC,QAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,cAAc,CAAC;AACrC,IAAI,CAAC;AACL;AACA,IAAI,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC;AAC5C,IAAI,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,GAAG;AACxE,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrD,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG;AAClE;AACA,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,GAAG;AAC9C,QAAQ,CAAC;AACT,QAAQ,IAAI,CAAC,CAAC;AACd,YAAY,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,gBAAgB,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,GAAG;AAChC,gBAAgB,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC,CAAC,GAAG;AAC7C,gBAAgB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE;AAC9B,YAAY,CAAC;AACb,QAAQ,EAAE;AACV,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;AAChC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE;AAC9E;AACA,QAAQ,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7B,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC3B;AACA,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvB,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AACzB,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACvB,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AACzB;AACA,YAAY,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE;AAC1B,QAAQ,EAAE;AACV,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,QAAQ,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,EAAE;AAC1B,QAAQ,EAAE,EAAE,CAAC,aAAa,CAAC,CAAC,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG;AACrD,QAAQ,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE;AAClC,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AACxB,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACtB,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;AACxB,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACtB,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,0BAA0B,CAAC,KAAK,CAAC,CAAC,CAAC;AAChD,QAAQ,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,EAAE;AAClC,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AAC3B,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,EAAE;AACtC,YAAY,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;AAClC,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE;AACrB,gBAAgB,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AAC9B,gBAAgB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5B,gBAAgB,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;AAC9B,gBAAgB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC5B,YAAY,CAAC;AACb,YAAY,IAAI,CAAC,CAAC;AAClB,gBAAgB,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7B,gBAAgB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,gBAAgB,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7B,gBAAgB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,YAAY,CAAC;AACb,YAAY,kBAAkB,CAAC,CAAC,EAAE;AAClC,QAAQ,GAAG;AACX,IAAI,CAAC;AACL;AACA,IAAI,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AAC3B,QAAQ,GAAG,CAAC
,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE;AAChD,QAAQ,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE;AAC9C;AACA,QAAQ,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ;AACrC,QAAQ,MAAM,CAAC,UAAU,GAAG,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;AAC9C;AACA,QAAQ,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI;AACpD,QAAQ,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACjB;AACA,QAAQ,IAAI,CAAC,UAAU,EAAE;AACzB,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC;AAC/B,YAAY,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,YAAY,CAAC;AACzC,YAAY,CAAC,IAAI,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,gBAAgB,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,OAAO;AACjF,gBAAgB,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;AACvG,gBAAgB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;AACtD,oBAAoB,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;AAC3C,wBAAwB,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO;AACzE,wBAAwB,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE;AACpE,wBAAwB,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,EAAE,IAAI,GAAG;AAChE;AACA,wBAAwB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS;AAC7E,wBAAwB,OAAO,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,CAAC;AAC/D,wBAAwB,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE;AACnE,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE;AACrD,4BAA4B,EAAE,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC;AAC1D,gCAAgC,MAAM,CAAC,CAAC,CAAC;AACzC,4BAA4B,CAAC;AAC7B,4BAA4B,IAAI,CAAC,CAAC;AAClC,gCAAgC,MAAM,CAAC,CAAC,CAAC;AACzC,4BAA4B,CAAC;AAC7B,wBAAwB,EAAE;AAC1B,wBAAwB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvD,4BAA4B,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;AAC7D,4BAA4B,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7C,4BAA4B,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC5E,4BAA4B,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE;AACzD,gCAAgC,MAAM,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AAC3E,4BAA4B,CAAC;AAC7B,4BAA4B,IAAI,CAAC,CAAC;AAClC,gCAAgC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,EAAE;AACxE,gCAAgC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,CAAC,EAAE;AAC5E,gCAAgC,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,oCAAoC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,WAAW,CAAC,GAAG,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACxG,gCAAgC,CAAC;AACjC,gCAAgC,IAAI,CAAC,CAAC;AACtC,oCAAoC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,WAAW,CAAC,GAAG,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACxI,gCAAgC,CAAC;AACjC,4BAA4B,CAAC;AAC7B,wBAAwB,GAAG;AAC3B,oBAAoB,CAAC;AACrB,gBAAgB,CAAC;AACjB,YAAY,EAAE;AACd,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ;AACrB,IAAI,8DAA8D;AAClE,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,
KAAK,GAAG;AAC5B;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AACxC,YAAY,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAC/E,YAAY,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AAClF,YAAY,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnE;AACA,YAAY,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG;AACjC;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK;AACrD,YAAY,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,GAAG;AACtE,YAAY,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/B,gBAAgB,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE;AAC5C,oBAAoB,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;AAC7E,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK;AAC9K,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK;AAC9K,YAAY,CAAC;AACb;AACA,YAAY,SAAS,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACnD,gBAAgB,QAAQ,CAAC,UAAU,EAAE;AACrC,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC;AAC5B,oBAAoB,KAAK,CAAC,CAAC,CAAC,CAAC;AAC7B,oBAAoB,GAAG,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC;AAClC,oBAAoB,EAAE,CAAC,CAAC,EAAE;AAC1B,gBAAgB,GAAG;AACnB,YAAY,GAAG;AACf;AACA,YAAY,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,IAAI;AAC3D;AACA,YAAY,EAAE,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK;AACnE,YAAY,EAAE,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC;AACtD,YAAY,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,OAAO,EAAE;AAC1D;AACA,YAAY,0BAA0B,CAAC,KAAK,EAAE;AAC9C,YAAY,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,GAAG,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC;AACtE;AACA,YAAY,EAAE,MAAM,CAAC,GAAG,CAAC,UAAU;AACnC,YAAY,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE;AAChC,gBAAgB,CAAC,MAAM,EAAE,CAAC,EAAE;AAC5B,gBAAgB,CAAC,IAAI,EAAE,KAAK,GAAG,GAAG,CAAC,SAAS,EAAE;AAC9C;AACA,YAAY,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE;AAC9B,gBAAgB,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC;AAC/B,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,oBAAoB,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAClC,wBAAwB,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;AACvC,oBAAoB,CAAC;AACrB,oBAAoB,IAAI,CAAC,EAAE,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC;AAClD,wBAAwB,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,IAAI,EAAE;AACvE,oBAAoB,CAAC;AACrB,oBAAoB,IAAI,CAAC,CAAC;AAC1B,wBAAwB,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE;AAC7C,oBAAoB,CAAC;AACrB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE,GAAG,EAAE;AACxC,gBAAgB,CAAC,EAAE,EAAE,KAAK,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC3C,oBAAoB,SAAS,CAAC,CAAC,EAAE;
AACjC,oBAAoB,QAAQ,CAAC,YAAY,EAAE;AAC3C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,CAAC;AAChC,oBAAoB,EAAE;AACtB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;AACjF,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,wBAAwB,KAAK,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,IAAI,GAAG;AAC7D,wBAAwB,OAAO,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC;AACzD,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,QAAQ,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9C,oBAAoB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,EAAE;AAChF,oBAAoB,QAAQ,CAAC,eAAe,EAAE;AAC9C,wBAAwB,IAAI,CAAC,CAAC,CAAC;AAC/B,oBAAoB,GAAG;AACvB,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,EAAE,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE;AAC/C,oBAAoB,QAAQ,CAAC,gBAAgB,EAAE;AAC/C,wBAAwB,IAAI,CAAC,CAAC,CAAC;AAC/B,oBAAoB,GAAG;AACvB,gBAAgB,GAAG;AACnB;AACA,YAAY,GAAG,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI;AACjE,YAAY,GAAG,KAAK,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC;AAChE,YAAY,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;AACzE,YAAY,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,EAAE;AAChC,gBAAgB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE;AAC9C,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACvC,oBAAoB,CAAC,SAAS,EAAE,CAAC,EAAE,CAAC,cAAc,EAAE;AACpD,YAAY,GAAG;AACf;AACA,YAAY,EAAE,CAAC,UAAU,EAAE;AAC3B,gBAAgB,EAAE,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI;AACvD,gBAAgB,EAAE,CAAC,SAAS,EAAE,IAAI,GAAG,MAAM,GAAG;AAC9C;AACA,gBAAgB,EAAE,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI;AAChE,gBAAgB,EAAE,CAAC,MAAM,EAAE,IAAI,EAAE;AACjC,oBAAoB,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG;AAC/D,oBAAoB,CAAC,UAAU,EAAE;AACjC,oBAAoB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACvC,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjD,wBAAwB,EAAE,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC;AACtD,4BAA4B,MAAM,CAAC,CAAC,CAAC;AACrC,wBAAwB,CAAC;AACzB,wBAAwB,IAAI,CAAC,CAAC;AAC9B,4BAA4B,MAAM,CAAC,CAAC,CAAC;AACrC,wBAAwB,CAAC;AACzB,oBAAoB,EAAE;AACtB,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,wBAAwB,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;AACzD,wBAAwB,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE;AAC1C,4BAA4B,MAAM,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACpF,wBAAwB,CAAC;AACzB,wBAAwB,IAAI,CAAC,CAAC;AAC9B,4BAA4B,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,EAAE;AACpE,4BAA4B,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,CAAC,EAAE;AACxE,4BAA4B,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACjD,gCAAgC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,WAAW,CAAC,GAAG,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG;AACpG,4BAA4B,CAAC;AAC7B,4BAA4B,IAAI,CAAC,CAAC;AAClC,gCAAgC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,WAAW,CAAC,GAAG,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI;AACpI,4BAA4B,CAAC;AAC7B,wBAAwB,CAAC;AACzB,oBAAoB,GAAG;AACvB,YAAY,CAAC;AACb;AACA,YAAY,
EAAE,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC;AAC9D,YAAY,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE;AAC9C;AACA;AACA,YAAY,EAAE,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,GAAG;AAC3C,YAAY,EAAE,CAAC,IAAI,EAAE;AACrB,gBAAgB,CAAC,UAAU,EAAE;AAC7B,gBAAgB,CAAC,QAAQ,CAAC,QAAQ,CAAC;AACnC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;AAClC,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,QAAQ,CAAC,CAAC,EAAE;AACxC,oBAAoB,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE;AACnC,oBAAoB,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AACjD,gBAAgB,EAAE;AAClB,gBAAgB,CAAC,MAAM,GAAG;AAC1B,QAAQ,GAAG;AACX;AACA;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,SAAS,GAAG;AACpD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,IAAI;AACjF,QAAQ,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACnF,QAAQ,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,IAAI;AAC/E,QAAQ,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI;AAC3E,QAAQ,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,IAAI;AACvF,QAAQ,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,GAAG,CAAC,IAAI;AACrH,QAAQ,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,UAAU,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,UAAU,GAAG,CAAC,GAAG;AAC5F,QAAQ,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,GAAG;AAC7F,QAAQ,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,cAAc,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC,GAAG;AACtG,QAAQ,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG;AACxE,QAAQ,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG;AACrE,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC1E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC5E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC7E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC3E,QAAQ,GAAG;AACX,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,
YAAY,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP;AACA,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB,EAAE;AChYF,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE;AACjB;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ;AAC7C,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,QAAQ,GAAG;AACxC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,GAAG;AACtC;AACA,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;AAC3D,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI;AACtB,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE;AACzC,QAAQ,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,KAAK;AACpC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC;AACjD,QAAQ,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI;AAC7B,QAAQ,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI;AACvB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG;AACxB,QAAQ,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC,WAAW,GAAG,SAAS,GAAG;AAC3E;AACA;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,OAAO,CAAC,SAAS;AACxB,IAAI,8DAA8D;AAClE;AACA,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE;AACrD;AACA,IAAI,OAAO;AACX,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpB,QAAQ,CAAC,aAAa,CAAC,KAAK,CAAC;AAC7B,QAAQ,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AAChD;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ;AACrB,IAAI,8DAA8D;AAClE;AACA,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/B,QAAQ,WAAW,CAAC,KAAK,GAAG;AAC5B,QAAQ,WAAW,CAAC,MAAM,CAAC,QAAQ,EAAE;AACrC;AACA,QAAQ,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;AACvC,YAAY,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE;AAC5C;AACA,YAAY,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,EAAE;AACxC;AACA,YAAY,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACnF,YAAY,GAAG,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE;AACtF;AACA,YAAY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAC;AACvC,gBAAgB,EAAE,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrC,oBAAoB,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE;AAC1C,gBAAgB,CAAC,CAAC,IAAI,CAAC,CAAC;AACxB,oBAAoB,SAAS,CAAC,UAAU,GAAG,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE;AAC1E,gBAAgB,CAAC;AACjB,YAAY,EAAE;AACd,YAAY,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;AACxC;AACA,YAAY,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC;AAClE,YAAY,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AACxC,gBAAgB,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,EAAE;AAClD,gBAAgB,MAAM,CAAC,KAAK,CAAC;AAC7B,YAAY,CAAC,CAAC,IAAI,CAAC,CAAC;AACpB,gBAAgB,SAAS,CAAC,SAAS,GAAG,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG;AAC3D,YAAY,CAAC;AACb;AACA,YAAY,QAAQ,CAAC,KAAK,CAAC,cAAc,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE;AAClF,YAAY,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE;AACrC,QAAQ,GAAG;AACX;AACA,QAAQ,WAAW,CAAC,SAAS,EAAE,aAAa,CAAC,SAAS,GAAG;AACzD,QAAQ,MAAM,CAAC,KAAK,CAAC;AACrB,IAAI,CAAC;AACL;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC;AACxD,IAAI,8DAA8D;AAClE;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,QAAQ,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACtB,YAA
Y,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC;AAC/B,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE;AACrD,YAAY,KAAK,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC;AAC7B,YAAY,OAAO,CAAC,CAAC,GAAG,CAAC,OAAO;AAChC,QAAQ,EAAE;AACV,QAAQ,EAAE,CAAC,EAAE,kBAAkB,CAAC,CAAC,CAAC;AAClC,YAAY,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC;AAC/B,YAAY,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC;AACtC,QAAQ,CAAC;AACT,QAAQ,OAAO,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,EAAE;AACxC,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACnE,QAAQ,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE;AAC7B,IAAI,GAAG;AACP;AACA,IAAI,QAAQ,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,CAAC,OAAO,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;AACpE,QAAQ,OAAO,GAAG;AAClB,IAAI,GAAG;AACP;AACA,IAAI,8DAA8D;AAClE,IAAI,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS;AAC9B,IAAI,8DAA8D;AAClE;AACA,IAAI,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU;AACpC,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC;AAC9B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC;AAC5B,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE;AACrD;AACA,IAAI,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,SAAS;AAC/E,IAAI,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,CAAC;AACxC,QAAQ,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,MAAM;AAC5D,QAAQ,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,eAAe,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI;AACzG,QAAQ,YAAY,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAAG,SAAS,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC,IAAI;AAC/G,QAAQ,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,GAAG,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,IAAI;AACrH;AACA,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM;AACzD,QAAQ,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACtB,YAAY,QAAQ,CAAC,KAAK,CAAC,KAAK,EAAE;AAClC,QAAQ,GAAG;AACX,QAAQ,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACvE,YAAY,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;AACzB,YAAY,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE;AACxC,YAAY,QAAQ,CAAC,QAAQ,CAAC,QAAQ,EAAE;AACxC,QAAQ,GAAG;AACX,QAAQ,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,GAAG,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE;AACnE,YAAY,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC;AAC3E,YAAY,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AAC7E,YAAY,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC;AAC9E,YAAY,MAAM,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC;AAC5E,YAAY,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE;AACpC,QAAQ,EAAE;AACV,IAAI,GAAG;AACP,IAAI,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE;AAC7C,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,EAAE;AAChC,IAAI,MAAM,CAAC,KAAK,CAAC;AACjB;AACA,EAAE","sourcesContent":["\n// set up main nv object\nvar nv = {};\n\n// the major global objects under the nv namespace\nnv.dev = false; //set false when in 
production\nnv.tooltip = nv.tooltip || {}; // For the tooltip system\nnv.utils = nv.utils || {}; // Utility subsystem\nnv.models = nv.models || {}; //stores all the possible models/components\nnv.charts = {}; //stores all the ready to use charts\nnv.logs = {}; //stores some statistics and potential error messages\nnv.dom = {}; //DOM manipulation functions\n\n// Node/CommonJS - require D3\nif (typeof(module) !== 'undefined' && typeof(exports) !== 'undefined' && typeof(d3) == 'undefined') {\n    d3 = require('d3');\n}\n\nnv.dispatch = d3.dispatch('render_start', 'render_end');\n\n// Function bind polyfill\n// Needed ONLY for phantomJS as it's missing until version 2.0 which is unreleased as of this comment\n// https://github.com/ariya/phantomjs/issues/10522\n// http://kangax.github.io/compat-table/es5/#Function.prototype.bind\n// phantomJS is used for running the test suite\nif (!Function.prototype.bind) {\n    Function.prototype.bind = function (oThis) {\n        if (typeof this !== \"function\") {\n            // closest thing possible to the ECMAScript 5 internal IsCallable function\n            throw new TypeError(\"Function.prototype.bind - what is trying to be bound is not callable\");\n        }\n\n        var aArgs = Array.prototype.slice.call(arguments, 1),\n            fToBind = this,\n            fNOP = function () {},\n            fBound = function () {\n                return fToBind.apply(this instanceof fNOP && oThis\n                        ? this\n                        : oThis,\n                    aArgs.concat(Array.prototype.slice.call(arguments)));\n            };\n\n        fNOP.prototype = this.prototype;\n        fBound.prototype = new fNOP();\n        return fBound;\n    };\n}\n\n//  Development render timers - disabled if dev = false\nif (nv.dev) {\n    nv.dispatch.on('render_start', function(e) {\n        nv.logs.startTime = +new Date();\n    });\n\n    nv.dispatch.on('render_end', function(e) {\n        nv.logs.endTime = +new Date();\n        nv.logs.totalTime = nv.logs.endTime - nv.logs.startTime;\n        nv.log('total', nv.logs.totalTime); // used for development, to keep track of graph generation times\n    });\n}\n\n// Logs all arguments, and returns the last so you can test things in place\n// Note: in IE8 console.log is an object not a function, and if modernizr is used\n// then calling Function.prototype.bind with with anything other than a function\n// causes a TypeError to be thrown.\nnv.log = function() {\n    if (nv.dev && window.console && console.log && console.log.apply)\n        console.log.apply(console, arguments);\n    else if (nv.dev && window.console && typeof console.log == \"function\" && Function.prototype.bind) {\n        var log = Function.prototype.bind.call(console.log, console);\n        log.apply(console, arguments);\n    }\n    return arguments[arguments.length - 1];\n};\n\n// print console warning, should be used by deprecated functions\nnv.deprecated = function(name, info) {\n    if (console && console.warn) {\n        console.warn('nvd3 warning: `' + name + '` has been deprecated. 
', info || '');\n    }\n};\n\n// The nv.render function is used to queue up chart rendering\n// in non-blocking async functions.\n// When all queued charts are done rendering, nv.dispatch.render_end is invoked.\nnv.render = function render(step) {\n    // number of graphs to generate in each timeout loop\n    step = step || 1;\n\n    nv.render.active = true;\n    nv.dispatch.render_start();\n\n    var renderLoop = function() {\n        var chart, graph;\n\n        for (var i = 0; i < step && (graph = nv.render.queue[i]); i++) {\n            chart = graph.generate();\n            if (typeof graph.callback == typeof(Function)) graph.callback(chart);\n        }\n\n        nv.render.queue.splice(0, i);\n\n        if (nv.render.queue.length) {\n            setTimeout(renderLoop);\n        }\n        else {\n            nv.dispatch.render_end();\n            nv.render.active = false;\n        }\n    };\n\n    setTimeout(renderLoop);\n};\n\nnv.render.active = false;\nnv.render.queue = [];\n\n/*\nAdds a chart to the async rendering queue. This method can take arguments in two forms:\nnv.addGraph({\n    generate: <Function>\n    callback: <Function>\n})\n\nor\n\nnv.addGraph(<generate Function>, <callback Function>)\n\nThe generate function should contain code that creates the NVD3 model, sets options\non it, adds data to an SVG element, and invokes the chart model. The generate function\nshould return the chart model.  See examples/lineChart.html for a usage example.\n\nThe callback function is optional, and it is called when the generate function completes.\n*/\nnv.addGraph = function(obj) {\n    if (typeof arguments[0] === typeof(Function)) {\n        obj = {generate: arguments[0], callback: arguments[1]};\n    }\n\n    nv.render.queue.push(obj);\n\n    if (!nv.render.active) {\n        nv.render();\n    }\n};\n\n// Node/CommonJS exports\nif (typeof(module) !== 'undefined' && typeof(exports) !== 'undefined') {\n  module.exports = nv;\n}\n\nif (typeof(window) !== 'undefined') {\n  window.nv = nv;\n}\n","/* Facade for queueing DOM write operations\r\n * with Fastdom (https://github.com/wilsonpage/fastdom)\r\n * if available.\r\n * This could easily be extended to support alternate\r\n * implementations in the future.\r\n */\r\nnv.dom.write = function(callback) {\r\n\tif (window.fastdom !== undefined) {\r\n\t\treturn fastdom.mutate(callback);\r\n\t}\r\n\treturn callback();\r\n};\r\n\r\n/* Facade for queueing DOM read operations\r\n * with Fastdom (https://github.com/wilsonpage/fastdom)\r\n * if available.\r\n * This could easily be extended to support alternate\r\n * implementations in the future.\r\n */\r\nnv.dom.read = function(callback) {\r\n\tif (window.fastdom !== undefined) {\r\n\t\treturn fastdom.measure(callback);\r\n\t}\r\n\treturn callback();\r\n};\r\n","/* Utility class to handle creation of an interactive layer.\n This places a rectangle on top of the chart. When you mouse move over it, it sends a dispatch\n containing the X-coordinate. It can also render a vertical line where the mouse is located.\n\n dispatch.elementMousemove is the important event to latch onto.  It is fired whenever the mouse moves over\n the rectangle. The dispatch is given one object which contains the mouseX/Y location.\n It also has 'pointXValue', which is the conversion of mouseX to the x-axis scale.\n */\nnv.interactiveGuideline = function() {\n    \"use strict\";\n\n    var margin = { left: 0, top: 0 } //Pass the chart's top and left magins. 
Used to calculate the mouseX/Y.\n        ,   width = null\n        ,   height = null\n        ,   xScale = d3.scale.linear()\n        ,   dispatch = d3.dispatch('elementMousemove', 'elementMouseout', 'elementClick', 'elementDblclick', 'elementMouseDown', 'elementMouseUp')\n        ,   showGuideLine = true\n        ,   svgContainer = null // Must pass the chart's svg, we'll use its mousemove event.\n        ,   tooltip = nv.models.tooltip()\n        ,   isMSIE =  window.ActiveXObject// Checkt if IE by looking for activeX. (excludes IE11)\n    ;\n\n    tooltip\n        .duration(0)\n        .hideDelay(0)\n        .hidden(false);\n\n    function layer(selection) {\n        selection.each(function(data) {\n            var container = d3.select(this);\n            var availableWidth = (width || 960), availableHeight = (height || 400);\n            var wrap = container.selectAll(\"g.nv-wrap.nv-interactiveLineLayer\")\n                .data([data]);\n            var wrapEnter = wrap.enter()\n                .append(\"g\").attr(\"class\", \" nv-wrap nv-interactiveLineLayer\");\n            wrapEnter.append(\"g\").attr(\"class\",\"nv-interactiveGuideLine\");\n\n            if (!svgContainer) {\n                return;\n            }\n\n            function mouseHandler() {\n                var d3mouse = d3.mouse(this);\n                var mouseX = d3mouse[0];\n                var mouseY = d3mouse[1];\n                var subtractMargin = true;\n                var mouseOutAnyReason = false;\n                if (isMSIE) {\n                    /*\n                     D3.js (or maybe SVG.getScreenCTM) has a nasty bug in Internet Explorer 10.\n                     d3.mouse() returns incorrect X,Y mouse coordinates when mouse moving\n                     over a rect in IE 10.\n                     However, d3.event.offsetX/Y also returns the mouse coordinates\n                     relative to the triggering <rect>. So we use offsetX/Y on IE.\n                     */\n                    mouseX = d3.event.offsetX;\n                    mouseY = d3.event.offsetY;\n\n                    /*\n                     On IE, if you attach a mouse event listener to the <svg> container,\n                     it will actually trigger it for all the child elements (like <path>, <circle>, etc).\n                     When this happens on IE, the offsetX/Y is set to where ever the child element\n                     is located.\n                     As a result, we do NOT need to subtract margins to figure out the mouse X/Y\n                     position under this scenario. 
Removing the line below *will* cause\n                     the interactive layer to not work right on IE.\n                     */\n                    if(d3.event.target.tagName !== \"svg\") {\n                        subtractMargin = false;\n                    }\n\n                    if (d3.event.target.className.baseVal.match(\"nv-legend\")) {\n                        mouseOutAnyReason = true;\n                    }\n\n                }\n\n                if(subtractMargin) {\n                    mouseX -= margin.left;\n                    mouseY -= margin.top;\n                }\n\n                /* If mouseX/Y is outside of the chart's bounds,\n                 trigger a mouseOut event.\n                 */\n                if (d3.event.type === 'mouseout'\n                    || mouseX < 0 || mouseY < 0\n                    || mouseX > availableWidth || mouseY > availableHeight\n                    || (d3.event.relatedTarget && d3.event.relatedTarget.ownerSVGElement === undefined)\n                    || mouseOutAnyReason\n                    ) {\n\n                    if (isMSIE) {\n                        if (d3.event.relatedTarget\n                            && d3.event.relatedTarget.ownerSVGElement === undefined\n                            && (d3.event.relatedTarget.className === undefined\n                                || d3.event.relatedTarget.className.match(tooltip.nvPointerEventsClass))) {\n\n                            return;\n                        }\n                    }\n                    dispatch.elementMouseout({\n                        mouseX: mouseX,\n                        mouseY: mouseY\n                    });\n                    layer.renderGuideLine(null); //hide the guideline\n                    tooltip.hidden(true);\n                    return;\n                } else {\n                    tooltip.hidden(false);\n                }\n\n\n                var scaleIsOrdinal = typeof xScale.rangeBands === 'function';\n                var pointXValue = undefined;\n\n                // Ordinal scale has no invert method\n                if (scaleIsOrdinal) {\n                    var elementIndex = d3.bisect(xScale.range(), mouseX) - 1;\n                    // Check if mouseX is in the range band\n                    if (xScale.range()[elementIndex] + xScale.rangeBand() >= mouseX) {\n                        pointXValue = xScale.domain()[d3.bisect(xScale.range(), mouseX) - 1];\n                    }\n                    else {\n                        dispatch.elementMouseout({\n                            mouseX: mouseX,\n                            mouseY: mouseY\n                        });\n                        layer.renderGuideLine(null); //hide the guideline\n                        tooltip.hidden(true);\n                        return;\n                    }\n                }\n                else {\n                    pointXValue = xScale.invert(mouseX);\n                }\n\n                dispatch.elementMousemove({\n                    mouseX: mouseX,\n                    mouseY: mouseY,\n                    pointXValue: pointXValue\n                });\n\n                //If user double clicks the layer, fire a elementDblclick\n                if (d3.event.type === \"dblclick\") {\n                    dispatch.elementDblclick({\n                        mouseX: mouseX,\n                        mouseY: mouseY,\n                        pointXValue: pointXValue\n                    });\n                }\n\n                // if user 
single clicks the layer, fire elementClick\n                if (d3.event.type === 'click') {\n                    dispatch.elementClick({\n                        mouseX: mouseX,\n                        mouseY: mouseY,\n                        pointXValue: pointXValue\n                    });\n                }\n\n                // if user presses mouse down the layer, fire elementMouseDown\n                if (d3.event.type === 'mousedown') {\n                \tdispatch.elementMouseDown({\n                \t\tmouseX: mouseX,\n                \t\tmouseY: mouseY,\n                \t\tpointXValue: pointXValue\n                \t});\n                }\n\n                // if user presses mouse down the layer, fire elementMouseUp\n                if (d3.event.type === 'mouseup') {\n                \tdispatch.elementMouseUp({\n                \t\tmouseX: mouseX,\n                \t\tmouseY: mouseY,\n                \t\tpointXValue: pointXValue\n                \t});\n                }\n            }\n\n            svgContainer\n                .on(\"touchmove\",mouseHandler)\n                .on(\"mousemove\",mouseHandler, true)\n                .on(\"mouseout\" ,mouseHandler,true)\n                .on(\"mousedown\" ,mouseHandler,true)\n                .on(\"mouseup\" ,mouseHandler,true)\n                .on(\"dblclick\" ,mouseHandler)\n                .on(\"click\", mouseHandler)\n            ;\n\n            layer.guideLine = null;\n            //Draws a vertical guideline at the given X postion.\n            layer.renderGuideLine = function(x) {\n                if (!showGuideLine) return;\n                if (layer.guideLine && layer.guideLine.attr(\"x1\") === x) return;\n                nv.dom.write(function() {\n                    var line = wrap.select(\".nv-interactiveGuideLine\")\n                        .selectAll(\"line\")\n                        .data((x != null) ? [nv.utils.NaNtoZero(x)] : [], String);\n                    line.enter()\n                        .append(\"line\")\n                        .attr(\"class\", \"nv-guideline\")\n                        .attr(\"x1\", function(d) { return d;})\n                        .attr(\"x2\", function(d) { return d;})\n                        .attr(\"y1\", availableHeight)\n                        .attr(\"y2\",0);\n                    line.exit().remove();\n                });\n            }\n        });\n    }\n\n    layer.dispatch = dispatch;\n    layer.tooltip = tooltip;\n\n    layer.margin = function(_) {\n        if (!arguments.length) return margin;\n        margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;\n        margin.left   = typeof _.left   != 'undefined' ? 
_.left   : margin.left;\n        return layer;\n    };\n\n    layer.width = function(_) {\n        if (!arguments.length) return width;\n        width = _;\n        return layer;\n    };\n\n    layer.height = function(_) {\n        if (!arguments.length) return height;\n        height = _;\n        return layer;\n    };\n\n    layer.xScale = function(_) {\n        if (!arguments.length) return xScale;\n        xScale = _;\n        return layer;\n    };\n\n    layer.showGuideLine = function(_) {\n        if (!arguments.length) return showGuideLine;\n        showGuideLine = _;\n        return layer;\n    };\n\n    layer.svgContainer = function(_) {\n        if (!arguments.length) return svgContainer;\n        svgContainer = _;\n        return layer;\n    };\n\n    return layer;\n};\n\n/* Utility class that uses d3.bisect to find the index in a given array, where a search value can be inserted.\n This is different from normal bisectLeft; this function finds the nearest index to insert the search value.\n\n For instance, lets say your array is [1,2,3,5,10,30], and you search for 28.\n Normal d3.bisectLeft will return 4, because 28 is inserted after the number 10.  But interactiveBisect will return 5\n because 28 is closer to 30 than 10.\n\n Unit tests can be found in: interactiveBisectTest.html\n\n Has the following known issues:\n * Will not work if the data points move backwards (ie, 10,9,8,7, etc) or if the data points are in random order.\n * Won't work if there are duplicate x coordinate values.\n */\nnv.interactiveBisect = function (values, searchVal, xAccessor) {\n    \"use strict\";\n    if (! (values instanceof Array)) {\n        return null;\n    }\n    var _xAccessor;\n    if (typeof xAccessor !== 'function') {\n        _xAccessor = function(d) {\n            return d.x;\n        }\n    } else {\n        _xAccessor = xAccessor;\n    }\n    var _cmp = function(d, v) {\n        // Accessors are no longer passed the index of the element along with\n        // the element itself when invoked by d3.bisector.\n        //\n        // Starting at D3 v3.4.4, d3.bisector() started inspecting the\n        // function passed to determine if it should consider it an accessor\n        // or a comparator. 
This meant that accessors that take two arguments\n        // (expecting an index as the second parameter) are treated as\n        // comparators where the second argument is the search value against\n        // which the first argument is compared.\n        return _xAccessor(d) - v;\n    };\n\n    var bisect = d3.bisector(_cmp).left;\n    var index = d3.max([0, bisect(values,searchVal) - 1]);\n    var currentValue = _xAccessor(values[index]);\n\n    if (typeof currentValue === 'undefined') {\n        currentValue = index;\n    }\n\n    if (currentValue === searchVal) {\n        return index; //found exact match\n    }\n\n    var nextIndex = d3.min([index+1, values.length - 1]);\n    var nextValue = _xAccessor(values[nextIndex]);\n\n    if (typeof nextValue === 'undefined') {\n        nextValue = nextIndex;\n    }\n\n    if (Math.abs(nextValue - searchVal) >= Math.abs(currentValue - searchVal)) {\n        return index;\n    } else {\n        return nextIndex\n    }\n};\n\n/*\n Returns the index in the array \"values\" that is closest to searchVal.\n Only returns an index if searchVal is within some \"threshold\".\n Otherwise, returns null.\n */\nnv.nearestValueIndex = function (values, searchVal, threshold) {\n    \"use strict\";\n    var yDistMax = Infinity, indexToHighlight = null;\n    values.forEach(function(d,i) {\n        var delta = Math.abs(searchVal - d);\n        if ( d != null && delta <= yDistMax && delta < threshold) {\n            yDistMax = delta;\n            indexToHighlight = i;\n        }\n    });\n    return indexToHighlight;\n};\n","\n/* Model which can be instantiated to handle tooltip rendering.\n Example usage:\n var tip = nv.models.tooltip().gravity('w').distance(23)\n .data(myDataObject);\n\n tip();    //just invoke the returned function to render tooltip.\n */\nnv.models.tooltip = function() {\n    \"use strict\";\n\n    /*\n    Tooltip data. If data is given in the proper format, a consistent tooltip is generated.\n    Example Format of data:\n    {\n        key: \"Date\",\n        value: \"August 2009\",\n        series: [\n            {key: \"Series 1\", value: \"Value 1\", color: \"#000\"},\n            {key: \"Series 2\", value: \"Value 2\", color: \"#00f\"}\n        ]\n    }\n    */\n    var id = \"nvtooltip-\" + Math.floor(Math.random() * 100000) // Generates a unique id when you create a new tooltip() object.\n        ,   data = null\n        ,   gravity = 'w'   // Can be 'n','s','e','w'. Determines how tooltip is positioned.\n        ,   distance = 25 // Distance to offset tooltip from the mouse location.\n        ,   snapDistance = 0   // Tolerance allowed before tooltip is moved from its current position (creates 'snapping' effect)\n        ,   classes = null  // Attaches additional CSS classes to the tooltip DIV that is created.\n        ,   hidden = true  // Start off hidden, toggle with hide/show functions below.\n        ,   hideDelay = 200  // Delay (in ms) before the tooltip hides after calling hide().\n        ,   tooltip = null // d3 select of the tooltip div.\n        ,   lastPosition = { left: null, top: null } // Last position the tooltip was in.\n        ,   enabled = true  // True -> tooltips are rendered. 
False -> don't render tooltips.\n        ,   duration = 100 // Tooltip movement duration, in ms.\n        ,   headerEnabled = true // If is to show the tooltip header.\n        ,   nvPointerEventsClass = \"nv-pointer-events-none\" // CSS class to specify whether element should not have mouse events.\n    ;\n\n    // Format function for the tooltip values column.\n    var valueFormatter = function(d, i) {\n        return d;\n    };\n\n    // Format function for the tooltip header value.\n    var headerFormatter = function(d) {\n        return d;\n    };\n\n    var keyFormatter = function(d, i) {\n        return d;\n    };\n\n    // By default, the tooltip model renders a beautiful table inside a DIV.\n    // You can override this function if a custom tooltip is desired.\n    var contentGenerator = function(d) {\n        if (d === null) {\n            return '';\n        }\n\n        var table = d3.select(document.createElement(\"table\"));\n        if (headerEnabled) {\n            var theadEnter = table.selectAll(\"thead\")\n                .data([d])\n                .enter().append(\"thead\");\n\n            theadEnter.append(\"tr\")\n                .append(\"td\")\n                .attr(\"colspan\", 3)\n                .append(\"strong\")\n                .classed(\"x-value\", true)\n                .html(headerFormatter(d.value));\n        }\n\n        var tbodyEnter = table.selectAll(\"tbody\")\n            .data([d])\n            .enter().append(\"tbody\");\n\n        var trowEnter = tbodyEnter.selectAll(\"tr\")\n                .data(function(p) { return p.series})\n                .enter()\n                .append(\"tr\")\n                .classed(\"highlight\", function(p) { return p.highlight});\n\n        trowEnter.append(\"td\")\n            .classed(\"legend-color-guide\",true)\n            .append(\"div\")\n            .style(\"background-color\", function(p) { return p.color});\n\n        trowEnter.append(\"td\")\n            .classed(\"key\",true)\n            .classed(\"total\",function(p) { return !!p.total})\n            .html(function(p, i) { return keyFormatter(p.key, i)});\n\n        trowEnter.append(\"td\")\n            .classed(\"value\",true)\n            .html(function(p, i) { return valueFormatter(p.value, i) });\n\n        trowEnter.filter(function (p,i) { return p.percent !== undefined }).append(\"td\")\n            .classed(\"percent\", true)\n            .html(function(p, i) { return \"(\" + d3.format('%')(p.percent) + \")\" });\n\n        trowEnter.selectAll(\"td\").each(function(p) {\n            if (p.highlight) {\n                var opacityScale = d3.scale.linear().domain([0,1]).range([\"#fff\",p.color]);\n                var opacity = 0.6;\n                d3.select(this)\n                    .style(\"border-bottom-color\", opacityScale(opacity))\n                    .style(\"border-top-color\", opacityScale(opacity))\n                ;\n            }\n        });\n\n        var html = table.node().outerHTML;\n        if (d.footer !== undefined)\n            html += \"<div class='footer'>\" + d.footer + \"</div>\";\n        return html;\n\n    };\n\n    /*\n     Function that returns the position (relative to the viewport/document.body)\n     the tooltip should be placed in.\n     Should return: {\n        left: <leftPos>,\n        top: <topPos>\n     }\n     */\n    var position = function() {\n        var pos = {\n            left: d3.event !== null ? d3.event.clientX : 0,\n            top: d3.event !== null ? 
d3.event.clientY : 0\n        };\n\n        if(getComputedStyle(document.body).transform != 'none') {\n            // Take the offset into account, as now the tooltip is relative\n            // to document.body.\n            var client = document.body.getBoundingClientRect();\n            pos.left -= client.left;\n            pos.top -= client.top;\n        }\n\n        return pos;\n    };\n\n    var dataSeriesExists = function(d) {\n        if (d && d.series) {\n            if (nv.utils.isArray(d.series)) {\n                return true;\n            }\n            // if object, it's okay just convert to array of the object\n            if (nv.utils.isObject(d.series)) {\n                d.series = [d.series];\n                return true;\n            }\n        }\n        return false;\n    };\n\n    // Calculates the gravity offset of the tooltip. Parameter is position of tooltip\n    // relative to the viewport.\n    var calcGravityOffset = function(pos) {\n        var height = tooltip.node().offsetHeight,\n            width = tooltip.node().offsetWidth,\n            clientWidth = document.documentElement.clientWidth, // Don't want scrollbars.\n            clientHeight = document.documentElement.clientHeight, // Don't want scrollbars.\n            left, top, tmp;\n\n        // calculate position based on gravity\n        switch (gravity) {\n            case 'e':\n                left = - width - distance;\n                top = - (height / 2);\n                if(pos.left + left < 0) left = distance;\n                if((tmp = pos.top + top) < 0) top -= tmp;\n                if((tmp = pos.top + top + height) > clientHeight) top -= tmp - clientHeight;\n                break;\n            case 'w':\n                left = distance;\n                top = - (height / 2);\n                if (pos.left + left + width > clientWidth) left = - width - distance;\n                if ((tmp = pos.top + top) < 0) top -= tmp;\n                if ((tmp = pos.top + top + height) > clientHeight) top -= tmp - clientHeight;\n                break;\n            case 'n':\n                left = - (width / 2) - 5; // - 5 is an approximation of the mouse's height.\n                top = distance;\n                if (pos.top + top + height > clientHeight) top = - height - distance;\n                if ((tmp = pos.left + left) < 0) left -= tmp;\n                if ((tmp = pos.left + left + width) > clientWidth) left -= tmp - clientWidth;\n                break;\n            case 's':\n                left = - (width / 2);\n                top = - height - distance;\n                if (pos.top + top < 0) top = distance;\n                if ((tmp = pos.left + left) < 0) left -= tmp;\n                if ((tmp = pos.left + left + width) > clientWidth) left -= tmp - clientWidth;\n                break;\n            case 'center':\n                left = - (width / 2);\n                top = - (height / 2);\n                break;\n            default:\n                left = 0;\n                top = 0;\n                break;\n        }\n\n        return { 'left': left, 'top': top };\n    };\n\n    /*\n     Positions the tooltip in the correct place, as given by the position() function.\n     */\n    var positionTooltip = function() {\n        nv.dom.read(function() {\n            var pos = position(),\n                gravityOffset = calcGravityOffset(pos),\n                left = pos.left + gravityOffset.left,\n                top = pos.top + gravityOffset.top;\n\n            // delay hiding a bit to avoid 
flickering\n            if (hidden) {\n                tooltip\n                    .interrupt()\n                    .transition()\n                    .delay(hideDelay)\n                    .duration(0)\n                    .style('opacity', 0);\n            } else {\n                // using tooltip.style('transform') returns values un-usable for tween\n                var old_translate = 'translate(' + lastPosition.left + 'px, ' + lastPosition.top + 'px)';\n                var new_translate = 'translate(' + Math.round(left) + 'px, ' + Math.round(top) + 'px)';\n                var translateInterpolator = d3.interpolateString(old_translate, new_translate);\n                var is_hidden = tooltip.style('opacity') < 0.1;\n\n                tooltip\n                    .interrupt() // cancel running transitions\n                    .transition()\n                    .duration(is_hidden ? 0 : duration)\n                    // using tween since some versions of d3 can't auto-tween a translate on a div\n                    .styleTween('transform', function (d) {\n                        return translateInterpolator;\n                    }, 'important')\n                    // Safari has its own `-webkit-transform` and does not support `transform`\n                    .styleTween('-webkit-transform', function (d) {\n                        return translateInterpolator;\n                    })\n                    .style('-ms-transform', new_translate)\n                    .style('opacity', 1);\n            }\n\n            lastPosition.left = left;\n            lastPosition.top = top;\n        });\n    };\n\n    // Creates new tooltip container, or uses existing one on DOM.\n    function initTooltip() {\n        if (!tooltip || !tooltip.node()) {\n            // Create new tooltip div if it doesn't exist on DOM.\n\n            var data = [1];\n            tooltip = d3.select(document.body).select('#'+id).data(data);\n\n            tooltip.enter().append('div')\n                   .attr(\"class\", \"nvtooltip \" + (classes ? 
classes : \"xy-tooltip\"))\n                   .attr(\"id\", id)\n                   .style(\"top\", 0).style(\"left\", 0)\n                   .style('opacity', 0)\n                   .style('position', 'fixed')\n                   .selectAll(\"div, table, td, tr\").classed(nvPointerEventsClass, true)\n                   .classed(nvPointerEventsClass, true);\n\n            tooltip.exit().remove()\n        }\n    }\n\n    // Draw the tooltip onto the DOM.\n    function nvtooltip() {\n        if (!enabled) return;\n        if (!dataSeriesExists(data)) return;\n\n        nv.dom.write(function () {\n            initTooltip();\n            // Generate data and set it into tooltip.\n            // Bonus - If you override contentGenerator and return falsey you can use something like\n            //         React or Knockout to bind the data for your tooltip.\n            var newContent = contentGenerator(data);\n            if (newContent) {\n                tooltip.node().innerHTML = newContent;\n            }\n\n            positionTooltip();\n        });\n\n        return nvtooltip;\n    }\n\n    nvtooltip.nvPointerEventsClass = nvPointerEventsClass;\n    nvtooltip.options = nv.utils.optionsFunc.bind(nvtooltip);\n\n    nvtooltip._options = Object.create({}, {\n        // simple read/write options\n        duration: {get: function(){return duration;}, set: function(_){duration=_;}},\n        gravity: {get: function(){return gravity;}, set: function(_){gravity=_;}},\n        distance: {get: function(){return distance;}, set: function(_){distance=_;}},\n        snapDistance: {get: function(){return snapDistance;}, set: function(_){snapDistance=_;}},\n        classes: {get: function(){return classes;}, set: function(_){classes=_;}},\n        enabled: {get: function(){return enabled;}, set: function(_){enabled=_;}},\n        hideDelay: {get: function(){return hideDelay;}, set: function(_){hideDelay=_;}},\n        contentGenerator: {get: function(){return contentGenerator;}, set: function(_){contentGenerator=_;}},\n        valueFormatter: {get: function(){return valueFormatter;}, set: function(_){valueFormatter=_;}},\n        headerFormatter: {get: function(){return headerFormatter;}, set: function(_){headerFormatter=_;}},\n        keyFormatter: {get: function(){return keyFormatter;}, set: function(_){keyFormatter=_;}},\n        headerEnabled: {get: function(){return headerEnabled;}, set: function(_){headerEnabled=_;}},\n        position: {get: function(){return position;}, set: function(_){position=_;}},\n\n        // Deprecated options\n        chartContainer: {get: function(){return document.body;}, set: function(_){\n            // deprecated after 1.8.3\n            nv.deprecated('chartContainer', 'feature removed after 1.8.3');\n        }},\n        fixedTop: {get: function(){return null;}, set: function(_){\n            // deprecated after 1.8.1\n            nv.deprecated('fixedTop', 'feature removed after 1.8.1');\n        }},\n        offset: {get: function(){return {left: 0, top: 0};}, set: function(_){\n            // deprecated after 1.8.1\n            nv.deprecated('offset', 'use chart.tooltip.distance() instead');\n        }},\n\n        // options with extra logic\n        hidden: {get: function(){return hidden;}, set: function(_){\n            if (hidden != _) {\n                hidden = !!_;\n                nvtooltip();\n            }\n        }},\n        data: {get: function(){return data;}, set: function(_){\n            // if showing a single data point, adjust data format with 
that\n            if (_.point) {\n                _.value = _.point.x;\n                _.series = _.series || {};\n                _.series.value = _.point.y;\n                _.series.color = _.point.color || _.series.color;\n            }\n            data = _;\n        }},\n\n        // read only properties\n        node: {get: function(){return tooltip.node();}, set: function(_){}},\n        id: {get: function(){return id;}, set: function(_){}}\n    });\n\n    nv.utils.initOptions(nvtooltip);\n    return nvtooltip;\n};\n","\n\n/*\nGets the browser window size\n\nReturns object with height and width properties\n */\nnv.utils.windowSize = function() {\n    // Sane defaults\n    var size = {width: 640, height: 480};\n\n    // Most recent browsers use\n    if (window.innerWidth && window.innerHeight) {\n        size.width = window.innerWidth;\n        size.height = window.innerHeight;\n        return (size);\n    }\n\n    // IE can use depending on mode it is in\n    if (document.compatMode=='CSS1Compat' &&\n        document.documentElement &&\n        document.documentElement.offsetWidth ) {\n\n        size.width = document.documentElement.offsetWidth;\n        size.height = document.documentElement.offsetHeight;\n        return (size);\n    }\n\n    // Earlier IE uses Doc.body\n    if (document.body && document.body.offsetWidth) {\n        size.width = document.body.offsetWidth;\n        size.height = document.body.offsetHeight;\n        return (size);\n    }\n\n    return (size);\n};\n\n\n/* handle dumb browser quirks...  isinstance breaks if you use frames\ntypeof returns 'object' for null, NaN is a number, etc.\n */\nnv.utils.isArray = Array.isArray;\nnv.utils.isObject = function(a) {\n    return a !== null && typeof a === 'object';\n};\nnv.utils.isFunction = function(a) {\n    return typeof a === 'function';\n};\nnv.utils.isDate = function(a) {\n    return toString.call(a) === '[object Date]';\n};\nnv.utils.isNumber = function(a) {\n    return !isNaN(a) && typeof a === 'number';\n};\n\n\n/*\nBinds callback function to run when window is resized\n */\nnv.utils.windowResize = function(handler) {\n    if (window.addEventListener) {\n        window.addEventListener('resize', handler);\n    } else {\n        nv.log(\"ERROR: Failed to bind to window.resize with: \", handler);\n    }\n    // return object with clear function to remove the single added callback.\n    return {\n        callback: handler,\n        clear: function() {\n            window.removeEventListener('resize', handler);\n        }\n    }\n};\n\n\n/*\nBackwards compatible way to implement more d3-like coloring of graphs.\nCan take in nothing, an array, or a function/scale\nTo use a normal scale, get the range and pass that because we must be able\nto take two arguments and use the index to keep backward compatibility\n*/\nnv.utils.getColor = function(color) {\n    //if you pass in nothing, get default colors back\n    if (color === undefined) {\n        return nv.utils.defaultColor();\n\n    //if passed an array, turn it into a color scale\n    } else if(nv.utils.isArray(color)) {\n        var color_scale = d3.scale.ordinal().range(color);\n        return function(d, i) {\n            var key = i === undefined ? 
d : i;\n            return d.color || color_scale(key);\n        };\n\n    //if passed a function or scale, return it, or whatever it may be\n    //external libs, such as angularjs-nvd3-directives use this\n    } else {\n        //can't really help it if someone passes rubbish as color\n        return color;\n    }\n};\n\n\n/*\nDefault color chooser uses a color scale of 20 colors from D3\n https://github.com/mbostock/d3/wiki/Ordinal-Scales#categorical-colors\n */\nnv.utils.defaultColor = function() {\n    // get range of the scale so we'll turn it into our own function.\n    return nv.utils.getColor(d3.scale.category20().range());\n};\n\n\n/*\nReturns a color function that takes the result of 'getKey' for each series and\nlooks for a corresponding color from the dictionary\n*/\nnv.utils.customTheme = function(dictionary, getKey, defaultColors) {\n    // use default series.key if getKey is undefined\n    getKey = getKey || function(series) { return series.key };\n    defaultColors = defaultColors || d3.scale.category20().range();\n\n    // start at end of default color list and walk back to index 0\n    var defIndex = defaultColors.length;\n\n    return function(series, index) {\n        var key = getKey(series);\n        if (nv.utils.isFunction(dictionary[key])) {\n            return dictionary[key]();\n        } else if (dictionary[key] !== undefined) {\n            return dictionary[key];\n        } else {\n            // no match in dictionary, use a default color\n            if (!defIndex) {\n                // used all the default colors, start over\n                defIndex = defaultColors.length;\n            }\n            defIndex = defIndex - 1;\n            return defaultColors[defIndex];\n        }\n    };\n};\n\n\n/*\nFrom the PJAX example on d3js.org, while this is not really directly needed\nit's a very cool method for doing pjax, I may expand upon it a little bit,\nopen to suggestions on anything that may be useful\n*/\nnv.utils.pjax = function(links, content) {\n\n    var load = function(href) {\n        d3.html(href, function(fragment) {\n            var target = d3.select(content).node();\n            target.parentNode.replaceChild(\n                d3.select(fragment).select(content).node(),\n                target);\n            nv.utils.pjax(links, content);\n        });\n    };\n\n    d3.selectAll(links).on(\"click\", function() {\n        history.pushState(this.href, this.textContent, this.href);\n        load(this.href);\n        d3.event.preventDefault();\n    });\n\n    d3.select(window).on(\"popstate\", function() {\n        if (d3.event.state) {\n            load(d3.event.state);\n        }\n    });\n};\n\n\n/*\nFor when we want to approximate the width in pixels for an SVG:text element.\nMost common instance is when the element is in a display:none; container.\nForumla is : text.length * font-size * constant_factor\n*/\nnv.utils.calcApproxTextWidth = function (svgTextElem) {\n    if (nv.utils.isFunction(svgTextElem.style) && nv.utils.isFunction(svgTextElem.text)) {\n        var fontSize = parseInt(svgTextElem.style(\"font-size\").replace(\"px\",\"\"), 10);\n        var textLength = svgTextElem.text().length;\n        return nv.utils.NaNtoZero(textLength * fontSize * 0.5);\n    }\n    return 0;\n};\n\n\n/*\nNumbers that are undefined, null or NaN, convert them to zeros.\n*/\nnv.utils.NaNtoZero = function(n) {\n    if (!nv.utils.isNumber(n)\n        || isNaN(n)\n        || n === null\n        || n === Infinity\n        || n === -Infinity) {\n\n        return 
0;\n    }\n    return n;\n};\n\n/*\nAdd a way to watch for d3 transition ends to d3\n*/\nd3.selection.prototype.watchTransition = function(renderWatch){\n    var args = [this].concat([].slice.call(arguments, 1));\n    return renderWatch.transition.apply(renderWatch, args);\n};\n\n\n/*\nHelper object to watch when d3 has rendered something\n*/\nnv.utils.renderWatch = function(dispatch, duration) {\n    if (!(this instanceof nv.utils.renderWatch)) {\n        return new nv.utils.renderWatch(dispatch, duration);\n    }\n\n    var _duration = duration !== undefined ? duration : 250;\n    var renderStack = [];\n    var self = this;\n\n    this.models = function(models) {\n        models = [].slice.call(arguments, 0);\n        models.forEach(function(model){\n            model.__rendered = false;\n            (function(m){\n                m.dispatch.on('renderEnd', function(arg){\n                    m.__rendered = true;\n                    self.renderEnd('model');\n                });\n            })(model);\n\n            if (renderStack.indexOf(model) < 0) {\n                renderStack.push(model);\n            }\n        });\n    return this;\n    };\n\n    this.reset = function(duration) {\n        if (duration !== undefined) {\n            _duration = duration;\n        }\n        renderStack = [];\n    };\n\n    this.transition = function(selection, args, duration) {\n        args = arguments.length > 1 ? [].slice.call(arguments, 1) : [];\n\n        if (args.length > 1) {\n            duration = args.pop();\n        } else {\n            duration = _duration !== undefined ? _duration : 250;\n        }\n        selection.__rendered = false;\n\n        if (renderStack.indexOf(selection) < 0) {\n            renderStack.push(selection);\n        }\n\n        if (duration === 0) {\n            selection.__rendered = true;\n            selection.delay = function() { return this; };\n            selection.duration = function() { return this; };\n            return selection;\n        } else {\n            if (selection.length === 0) {\n                selection.__rendered = true;\n            } else if (selection.every( function(d){ return !d.length; } )) {\n                selection.__rendered = true;\n            } else {\n                selection.__rendered = false;\n            }\n\n            var n = 0;\n            return selection\n                .transition()\n                .duration(duration)\n                .each(function(){ ++n; })\n                .each('end', function(d, i) {\n                    if (--n === 0) {\n                        selection.__rendered = true;\n                        self.renderEnd.apply(this, args);\n                    }\n                });\n        }\n    };\n\n    this.renderEnd = function() {\n        if (renderStack.every( function(d){ return d.__rendered; } )) {\n            renderStack.forEach( function(d){ d.__rendered = false; });\n            dispatch.renderEnd.apply(this, arguments);\n        }\n    }\n\n};\n\n\n/*\nTakes multiple objects and combines them into the first one (dst)\nexample:  nv.utils.deepExtend({a: 1}, {a: 2, b: 3}, {c: 4});\ngives:  {a: 2, b: 3, c: 4}\n*/\nnv.utils.deepExtend = function(dst){\n    var sources = arguments.length > 1 ? 
[].slice.call(arguments, 1) : [];\n    sources.forEach(function(source) {\n        for (var key in source) {\n            var isArray = nv.utils.isArray(dst[key]);\n            var isObject = nv.utils.isObject(dst[key]);\n            var srcObj = nv.utils.isObject(source[key]);\n\n            if (isObject && !isArray && srcObj) {\n                nv.utils.deepExtend(dst[key], source[key]);\n            } else {\n                dst[key] = source[key];\n            }\n        }\n    });\n};\n\n\n/*\nstate utility object, used to track d3 states in the models\n*/\nnv.utils.state = function(){\n    if (!(this instanceof nv.utils.state)) {\n        return new nv.utils.state();\n    }\n    var state = {};\n    var _self = this;\n    var _setState = function(){};\n    var _getState = function(){ return {}; };\n    var init = null;\n    var changed = null;\n\n    this.dispatch = d3.dispatch('change', 'set');\n\n    this.dispatch.on('set', function(state){\n        _setState(state, true);\n    });\n\n    this.getter = function(fn){\n        _getState = fn;\n        return this;\n    };\n\n    this.setter = function(fn, callback) {\n        if (!callback) {\n            callback = function(){};\n        }\n        _setState = function(state, update){\n            fn(state);\n            if (update) {\n                callback();\n            }\n        };\n        return this;\n    };\n\n    this.init = function(state){\n        init = init || {};\n        nv.utils.deepExtend(init, state);\n    };\n\n    var _set = function(){\n        var settings = _getState();\n\n        if (JSON.stringify(settings) === JSON.stringify(state)) {\n            return false;\n        }\n\n        for (var key in settings) {\n            if (state[key] === undefined) {\n                state[key] = {};\n            }\n            state[key] = settings[key];\n            changed = true;\n        }\n        return true;\n    };\n\n    this.update = function(){\n        if (init) {\n            _setState(init, false);\n            init = null;\n        }\n        if (_set.call(this)) {\n            this.dispatch.change(state);\n        }\n    };\n\n};\n\n\n/*\nSnippet of code you can insert into each nv.models.* to give you the ability to\ndo things like:\nchart.options({\n  showXAxis: true,\n  tooltips: true\n});\n\nTo enable in the chart:\nchart.options = nv.utils.optionsFunc.bind(chart);\n*/\nnv.utils.optionsFunc = function(args) {\n    if (args) {\n        d3.map(args).forEach((function(key,value) {\n            if (nv.utils.isFunction(this[key])) {\n                this[key](value);\n            }\n        }).bind(this));\n    }\n    return this;\n};\n\n\n/*\nnumTicks:  requested number of ticks\ndata:  the chart data\n\nreturns the number of ticks to actually use on X axis, based on chart data\nto avoid duplicate ticks with the same value\n*/\nnv.utils.calcTicksX = function(numTicks, data) {\n    // find max number of values from all data streams\n    var numValues = 1;\n    var i = 0;\n    for (i; i < data.length; i += 1) {\n        var stream_len = data[i] && data[i].values ? data[i].values.length : 0;\n        numValues = stream_len > numValues ? stream_len : numValues;\n    }\n    nv.log(\"Requested number of ticks: \", numTicks);\n    nv.log(\"Calculated max values to be: \", numValues);\n    // make sure we don't have more ticks than values to avoid duplicates\n    numTicks = numTicks > numValues ? numTicks = numValues - 1 : numTicks;\n    // make sure we have at least one tick\n    numTicks = numTicks < 1 ? 
1 : numTicks;\n    // make sure it's an integer\n    numTicks = Math.floor(numTicks);\n    nv.log(\"Calculating tick count as: \", numTicks);\n    return numTicks;\n};\n\n\n/*\nreturns number of ticks to actually use on Y axis, based on chart data\n*/\nnv.utils.calcTicksY = function(numTicks, data) {\n    // currently uses the same logic but we can adjust here if needed later\n    return nv.utils.calcTicksX(numTicks, data);\n};\n\n\n/*\nAdd a particular option from an options object onto chart\nOptions exposed on a chart are a getter/setter function that returns chart\non set to mimic typical d3 option chaining, e.g. svg.option1('a').option2('b');\n\noption objects should be generated via Object.create() to provide\nthe option of manipulating data via get/set functions.\n*/\nnv.utils.initOption = function(chart, name) {\n    // if it's a call option, just call it directly, otherwise do get/set\n    if (chart._calls && chart._calls[name]) {\n        chart[name] = chart._calls[name];\n    } else {\n        chart[name] = function (_) {\n            if (!arguments.length) return chart._options[name];\n            chart._overrides[name] = true;\n            chart._options[name] = _;\n            return chart;\n        };\n        // calling the option as _option will ignore if set by option already\n        // so nvd3 can set options internally but the stop if set manually\n        chart['_' + name] = function(_) {\n            if (!arguments.length) return chart._options[name];\n            if (!chart._overrides[name]) {\n                chart._options[name] = _;\n            }\n            return chart;\n        }\n    }\n};\n\n\n/*\nAdd all options in an options object to the chart\n*/\nnv.utils.initOptions = function(chart) {\n    chart._overrides = chart._overrides || {};\n    var ops = Object.getOwnPropertyNames(chart._options || {});\n    var calls = Object.getOwnPropertyNames(chart._calls || {});\n    ops = ops.concat(calls);\n    for (var i in ops) {\n        nv.utils.initOption(chart, ops[i]);\n    }\n};\n\n\n/*\nInherit options from a D3 object\nd3.rebind makes calling the function on target actually call it on source\nAlso use _d3options so we can track what we inherit for documentation and chained inheritance\n*/\nnv.utils.inheritOptionsD3 = function(target, d3_source, oplist) {\n    target._d3options = oplist.concat(target._d3options || []);\n    oplist.unshift(d3_source);\n    oplist.unshift(target);\n    d3.rebind.apply(this, oplist);\n};\n\n\n/*\nRemove duplicates from an array\n*/\nnv.utils.arrayUnique = function(a) {\n    return a.sort().filter(function(item, pos) {\n        return !pos || item != a[pos - 1];\n    });\n};\n\n\n/*\nKeeps a list of custom symbols to draw from in addition to d3.svg.symbol\nNecessary since d3 doesn't let you extend its list -_-\nAdd new symbols by doing nv.utils.symbols.set('name', function(size){...});\n*/\nnv.utils.symbolMap = d3.map();\n\n\n/*\nReplaces d3.svg.symbol so that we can look both there and our own map\n */\nnv.utils.symbol = function() {\n    var type,\n        size = 64;\n    function symbol(d,i) {\n        var t = type.call(this,d,i);\n        var s = size.call(this,d,i);\n        if (d3.svg.symbolTypes.indexOf(t) !== -1) {\n            return d3.svg.symbol().type(t).size(s)();\n        } else {\n            return nv.utils.symbolMap.get(t)(s);\n        }\n    }\n    symbol.type = function(_) {\n        if (!arguments.length) return type;\n        type = d3.functor(_);\n        return symbol;\n    };\n    symbol.size = 
function(_) {\n        if (!arguments.length) return size;\n        size = d3.functor(_);\n        return symbol;\n    };\n    return symbol;\n};\n\n\n/*\nInherit option getter/setter functions from source to target\nd3.rebind makes calling the function on target actually call it on source\nAlso track via _inherited and _d3options so we can track what we inherit\nfor documentation generation purposes and chained inheritance\n*/\nnv.utils.inheritOptions = function(target, source) {\n    // inherit all the things\n    var ops = Object.getOwnPropertyNames(source._options || {});\n    var calls = Object.getOwnPropertyNames(source._calls || {});\n    var inherited = source._inherited || [];\n    var d3ops = source._d3options || [];\n    var args = ops.concat(calls).concat(inherited).concat(d3ops);\n    args.unshift(source);\n    args.unshift(target);\n    d3.rebind.apply(this, args);\n    // pass along the lists to keep track of them, don't allow duplicates\n    target._inherited = nv.utils.arrayUnique(ops.concat(calls).concat(inherited).concat(ops).concat(target._inherited || []));\n    target._d3options = nv.utils.arrayUnique(d3ops.concat(target._d3options || []));\n};\n\n\n/*\nRuns common initialize code on the svg before the chart builds\n*/\nnv.utils.initSVG = function(svg) {\n    svg.classed({'nvd3-svg':true});\n};\n\n\n/*\nSanitize and provide default for the container height.\n*/\nnv.utils.sanitizeHeight = function(height, container) {\n    return (height || parseInt(container.style('height'), 10) || 400);\n};\n\n\n/*\nSanitize and provide default for the container width.\n*/\nnv.utils.sanitizeWidth = function(width, container) {\n    return (width || parseInt(container.style('width'), 10) || 960);\n};\n\n\n/*\nCalculate the available height for a chart.\n*/\nnv.utils.availableHeight = function(height, container, margin) {\n    return Math.max(0,nv.utils.sanitizeHeight(height, container) - margin.top - margin.bottom);\n};\n\n/*\nCalculate the available width for a chart.\n*/\nnv.utils.availableWidth = function(width, container, margin) {\n    return Math.max(0,nv.utils.sanitizeWidth(width, container) - margin.left - margin.right);\n};\n\n/*\nClear any rendered chart components and display a chart's 'noData' message\n*/\nnv.utils.noData = function(chart, container) {\n    var opt = chart.options(),\n        margin = opt.margin(),\n        noData = opt.noData(),\n        data = (noData == null) ? 
[\"No Data Available.\"] : [noData],\n        height = nv.utils.availableHeight(null, container, margin),\n        width = nv.utils.availableWidth(null, container, margin),\n        x = margin.left + width/2,\n        y = margin.top + height/2;\n\n    //Remove any previously created chart components\n    container.selectAll('g').remove();\n\n    var noDataText = container.selectAll('.nv-noData').data(data);\n\n    noDataText.enter().append('text')\n        .attr('class', 'nvd3 nv-noData')\n        .attr('dy', '-.7em')\n        .style('text-anchor', 'middle');\n\n    noDataText\n        .attr('x', x)\n        .attr('y', y)\n        .text(function(t){ return t; });\n};\n\n/*\n Wrap long labels.\n */\nnv.utils.wrapTicks = function (text, width) {\n    text.each(function() {\n        var text = d3.select(this),\n            words = text.text().split(/\\s+/).reverse(),\n            word,\n            line = [],\n            lineNumber = 0,\n            lineHeight = 1.1,\n            y = text.attr(\"y\"),\n            dy = parseFloat(text.attr(\"dy\")),\n            tspan = text.text(null).append(\"tspan\").attr(\"x\", 0).attr(\"y\", y).attr(\"dy\", dy + \"em\");\n        while (word = words.pop()) {\n            line.push(word);\n            tspan.text(line.join(\" \"));\n            if (tspan.node().getComputedTextLength() > width) {\n                line.pop();\n                tspan.text(line.join(\" \"));\n                line = [word];\n                tspan = text.append(\"tspan\").attr(\"x\", 0).attr(\"y\", y).attr(\"dy\", ++lineNumber * lineHeight + dy + \"em\").text(word);\n            }\n        }\n    });\n};\n\n/*\nCheck equality of 2 array\n*/\nnv.utils.arrayEquals = function (array1, array2) {\n    if (array1 === array2)\n        return true;\n\n    if (!array1 || !array2)\n        return false;\n\n    // compare lengths - can save a lot of time\n    if (array1.length != array2.length)\n        return false;\n\n    for (var i = 0,\n        l = array1.length; i < l; i++) {\n        // Check if we have nested arrays\n        if (array1[i] instanceof Array && array2[i] instanceof Array) {\n            // recurse into the nested arrays\n            if (!nv.arrayEquals(array1[i], array2[i]))\n                return false;\n        } else if (array1[i] != array2[i]) {\n            // Warning - two different object instances will never be equal: {x:20} != {x:20}\n            return false;\n        }\n    }\n    return true;\n};\n","nv.models.axis = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var axis = d3.svg.axis();\n    var scale = d3.scale.linear();\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 75 //only used for tickLabel currently\n        , height = 60 //only used for tickLabel currently\n        , axisLabelText = null\n        , showMaxMin = true //TODO: showMaxMin should be disabled on all ordinal scaled axes\n        , rotateLabels = 0\n        , rotateYLabel = true\n        , staggerLabels = false\n        , isOrdinal = false\n        , ticks = null\n        , axisLabelDistance = 0\n        , fontSize = undefined\n        , duration = 250\n        , dispatch = d3.dispatch('renderEnd')\n        ;\n    axis\n        .scale(scale)\n        .orient('bottom')\n        .tickFormat(function(d) { return d })\n    ;\n\n    
//============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var scale0;\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-axis').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-axis');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            if (ticks !== null)\n                axis.ticks(ticks);\n            else if (axis.orient() == 'top' || axis.orient() == 'bottom')\n                axis.ticks(Math.abs(scale.range()[1] - scale.range()[0]) / 100);\n\n            //TODO: consider calculating width/height based on whether or not label is added, for reference in charts using this component\n            g.watchTransition(renderWatch, 'axis').call(axis);\n\n            scale0 = scale0 || axis.scale();\n\n            var fmt = axis.tickFormat();\n            if (fmt == null) {\n                fmt = scale0.tickFormat();\n            }\n\n            var axisLabel = g.selectAll('text.nv-axislabel')\n                .data([axisLabelText || null]);\n            axisLabel.exit().remove();\n\n            //only skip when fontSize is undefined so it can be cleared with a null or blank string\n            if (fontSize !== undefined) {\n                g.selectAll('g').select(\"text\").style('font-size', fontSize);\n            }\n\n            var xLabelMargin;\n            var axisMaxMin;\n            var w;\n            switch (axis.orient()) {\n                case 'top':\n                    axisLabel.enter().append('text').attr('class', 'nv-axislabel');\n                  w = 0;\n                  if (scale.range().length === 1) {\n                    w = isOrdinal ? scale.range()[0] * 2 + scale.rangeBand() : 0;\n                  } else if (scale.range().length === 2) {\n                    w = isOrdinal ? scale.range()[0] + scale.range()[1] + scale.rangeBand() : scale.range()[1];\n                  } else if ( scale.range().length > 2){\n                    w = scale.range()[scale.range().length-1]+(scale.range()[1]-scale.range()[0]);\n                  };\n                    axisLabel\n                        .attr('text-anchor', 'middle')\n                        .attr('y', 0)\n                        .attr('x', w/2);\n                    if (showMaxMin) {\n                        axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')\n                            .data(scale.domain());\n                        axisMaxMin.enter().append('g').attr('class',function(d,i){\n                                return ['nv-axisMaxMin','nv-axisMaxMin-x',(i == 0 ? 
'nv-axisMin-x':'nv-axisMax-x')].join(' ')\n                        }).append('text');\n                        axisMaxMin.exit().remove();\n                        axisMaxMin\n                            .attr('transform', function(d,i) {\n                                return 'translate(' + nv.utils.NaNtoZero(scale(d)) + ',0)'\n                            })\n                            .select('text')\n                            .attr('dy', '-0.5em')\n                            .attr('y', -axis.tickPadding())\n                            .attr('text-anchor', 'middle')\n                            .text(function(d,i) {\n                                var v = fmt(d);\n                                return ('' + v).match('NaN') ? '' : v;\n                            });\n                        axisMaxMin.watchTransition(renderWatch, 'min-max top')\n                            .attr('transform', function(d,i) {\n                                return 'translate(' + nv.utils.NaNtoZero(scale.range()[i]) + ',0)'\n                            });\n                    }\n                    break;\n                case 'bottom':\n                    xLabelMargin = axisLabelDistance + 36;\n                    var maxTextWidth = 30;\n                    var textHeight = 0;\n                    var xTicks = g.selectAll('g').select(\"text\");\n                    var rotateLabelsRule = '';\n                    if (rotateLabels%360) {\n                        //Reset transform on ticks so textHeight can be calculated correctly\n                        xTicks.attr('transform', ''); \n                        //Calculate the longest xTick width\n                        xTicks.each(function(d,i){\n                            var box = this.getBoundingClientRect();\n                            var width = box.width;\n                            textHeight = box.height;\n                            if(width > maxTextWidth) maxTextWidth = width;\n                        });\n                        rotateLabelsRule = 'rotate(' + rotateLabels + ' 0,' + (textHeight/2 + axis.tickPadding()) + ')';\n                        //Convert to radians before calculating sin. Add 30 to margin for healthy padding.\n                        var sin = Math.abs(Math.sin(rotateLabels*Math.PI/180));\n                        xLabelMargin = (sin ? sin*maxTextWidth : maxTextWidth)+30;\n                        //Rotate all xTicks\n                        xTicks\n                            .attr('transform', rotateLabelsRule)\n                            .style('text-anchor', rotateLabels%360 > 0 ? 'start' : 'end');\n                    } else {\n                        if (staggerLabels) {\n                            xTicks\n                                .attr('transform', function(d,i) {\n                                    return 'translate(0,' + (i % 2 == 0 ? '0' : '12') + ')'\n                                });\n                        } else {\n                            xTicks.attr('transform', \"translate(0,0)\");\n                        }\n                    }\n                    axisLabel.enter().append('text').attr('class', 'nv-axislabel');\n                    w = 0;\n                    if (scale.range().length === 1) {\n                        w = isOrdinal ? scale.range()[0] * 2 + scale.rangeBand() : 0;\n                    } else if (scale.range().length === 2) {\n                        w = isOrdinal ? 
scale.range()[0] + scale.range()[1] + scale.rangeBand() : scale.range()[1];\n                    } else if ( scale.range().length > 2){\n                        w = scale.range()[scale.range().length-1]+(scale.range()[1]-scale.range()[0]);\n                    };\n                    axisLabel\n                        .attr('text-anchor', 'middle')\n                        .attr('y', xLabelMargin)\n                        .attr('x', w/2);\n                    if (showMaxMin) {\n                        //if (showMaxMin && !isOrdinal) {\n                        axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')\n                            //.data(scale.domain())\n                            .data([scale.domain()[0], scale.domain()[scale.domain().length - 1]]);\n                        axisMaxMin.enter().append('g').attr('class',function(d,i){\n                                return ['nv-axisMaxMin','nv-axisMaxMin-x',(i == 0 ? 'nv-axisMin-x':'nv-axisMax-x')].join(' ')\n                        }).append('text');\n                        axisMaxMin.exit().remove();\n                        axisMaxMin\n                            .attr('transform', function(d,i) {\n                                return 'translate(' + nv.utils.NaNtoZero((scale(d) + (isOrdinal ? scale.rangeBand() / 2 : 0))) + ',0)'\n                            })\n                            .select('text')\n                            .attr('dy', '.71em')\n                            .attr('y', axis.tickPadding())\n                            .attr('transform', rotateLabelsRule)\n                            .style('text-anchor', rotateLabels ? (rotateLabels%360 > 0 ? 'start' : 'end') : 'middle')\n                            .text(function(d,i) {\n                                var v = fmt(d);\n                                return ('' + v).match('NaN') ? '' : v;\n                            });\n                        axisMaxMin.watchTransition(renderWatch, 'min-max bottom')\n                            .attr('transform', function(d,i) {\n                                return 'translate(' + nv.utils.NaNtoZero((scale(d) + (isOrdinal ? scale.rangeBand() / 2 : 0))) + ',0)'\n                            });\n                    }\n\n                    break;\n                case 'right':\n                    axisLabel.enter().append('text').attr('class', 'nv-axislabel');\n                    axisLabel\n                        .style('text-anchor', rotateYLabel ? 'middle' : 'begin')\n                        .attr('transform', rotateYLabel ? 'rotate(90)' : '')\n                        .attr('y', rotateYLabel ? (-Math.max(margin.right, width) + 12 - (axisLabelDistance || 0)) : -10) //TODO: consider calculating this based on largest tick width... OR at least expose this on chart\n                        .attr('x', rotateYLabel ? (d3.max(scale.range()) / 2) : axis.tickPadding());\n                    if (showMaxMin) {\n                        axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')\n                            .data(scale.domain());\n                       \taxisMaxMin.enter().append('g').attr('class',function(d,i){\n                                return ['nv-axisMaxMin','nv-axisMaxMin-y',(i == 0 ? 
'nv-axisMin-y':'nv-axisMax-y')].join(' ')\n                        }).append('text')\n                            .style('opacity', 0);\n                        axisMaxMin.exit().remove();\n                        axisMaxMin\n                            .attr('transform', function(d,i) {\n                                return 'translate(0,' + nv.utils.NaNtoZero(scale(d)) + ')'\n                            })\n                            .select('text')\n                            .attr('dy', '.32em')\n                            .attr('y', 0)\n                            .attr('x', axis.tickPadding())\n                            .style('text-anchor', 'start')\n                            .text(function(d, i) {\n                                var v = fmt(d);\n                                return ('' + v).match('NaN') ? '' : v;\n                            });\n                        axisMaxMin.watchTransition(renderWatch, 'min-max right')\n                            .attr('transform', function(d,i) {\n                                return 'translate(0,' + nv.utils.NaNtoZero(scale.range()[i]) + ')'\n                            })\n                            .select('text')\n                            .style('opacity', 1);\n                    }\n                    break;\n                case 'left':\n                    /*\n                     //For dynamically placing the label. Can be used with dynamically-sized chart axis margins\n                     var yTicks = g.selectAll('g').select(\"text\");\n                     yTicks.each(function(d,i){\n                     var labelPadding = this.getBoundingClientRect().width + axis.tickPadding() + 16;\n                     if(labelPadding > width) width = labelPadding;\n                     });\n                     */\n                    axisLabel.enter().append('text').attr('class', 'nv-axislabel');\n                    axisLabel\n                        .style('text-anchor', rotateYLabel ? 'middle' : 'end')\n                        .attr('transform', rotateYLabel ? 'rotate(-90)' : '')\n                        .attr('y', rotateYLabel ? (-Math.max(margin.left, width) + 25 - (axisLabelDistance || 0)) : -10)\n                        .attr('x', rotateYLabel ? (-d3.max(scale.range()) / 2) : -axis.tickPadding());\n                    if (showMaxMin) {\n                        axisMaxMin = wrap.selectAll('g.nv-axisMaxMin')\n                            .data(scale.domain());\n                        axisMaxMin.enter().append('g').attr('class',function(d,i){\n                                return ['nv-axisMaxMin','nv-axisMaxMin-y',(i == 0 ? 'nv-axisMin-y':'nv-axisMax-y')].join(' ')\n                        }).append('text')\n                            .style('opacity', 0);\n                        axisMaxMin.exit().remove();\n                        axisMaxMin\n                            .attr('transform', function(d,i) {\n                                return 'translate(0,' + nv.utils.NaNtoZero(scale0(d)) + ')'\n                            })\n                            .select('text')\n                            .attr('dy', '.32em')\n                            .attr('y', 0)\n                            .attr('x', -axis.tickPadding())\n                            .attr('text-anchor', 'end')\n                            .text(function(d,i) {\n                                var v = fmt(d);\n                                return ('' + v).match('NaN') ? 
'' : v;\n                            });\n                        axisMaxMin.watchTransition(renderWatch, 'min-max right')\n                            .attr('transform', function(d,i) {\n                                return 'translate(0,' + nv.utils.NaNtoZero(scale.range()[i]) + ')'\n                            })\n                            .select('text')\n                            .style('opacity', 1);\n                    }\n                    break;\n            }\n            axisLabel.text(function(d) { return d });\n\n            if (showMaxMin && (axis.orient() === 'left' || axis.orient() === 'right')) {\n                //check if max and min overlap other values, if so, hide the values that overlap\n                g.selectAll('g') // the g's wrapping each tick\n                    .each(function(d,i) {\n                        d3.select(this).select('text').attr('opacity', 1);\n                        if (scale(d) < scale.range()[1] + 10 || scale(d) > scale.range()[0] - 10) { // 10 is assuming text height is 16... if d is 0, leave it!\n                            if (d > 1e-10 || d < -1e-10) // accounts for minor floating point errors... though could be problematic if the scale is EXTREMELY SMALL\n                                d3.select(this).attr('opacity', 0);\n\n                            d3.select(this).select('text').attr('opacity', 0); // Don't remove the ZERO line!!\n                        }\n                    });\n\n                //if Max and Min = 0 only show min, Issue #281\n                if (scale.domain()[0] == scale.domain()[1] && scale.domain()[0] == 0) {\n                    wrap.selectAll('g.nv-axisMaxMin').style('opacity', function (d, i) {\n                        return !i ? 1 : 0\n                    });\n                }\n            }\n\n            if (showMaxMin && (axis.orient() === 'top' || axis.orient() === 'bottom')) {\n                var maxMinRange = [];\n                wrap.selectAll('g.nv-axisMaxMin')\n                    .each(function(d,i) {\n                        try {\n                            if (i) // i== 1, max position\n                                maxMinRange.push(scale(d) - this.getBoundingClientRect().width - 4);  //assuming the max and min labels are as wide as the next tick (with an extra 4 pixels just in case)\n                            else // i==0, min position\n                                maxMinRange.push(scale(d) + this.getBoundingClientRect().width + 4)\n                        }catch (err) {\n                            if (i) // i== 1, max position\n                                maxMinRange.push(scale(d) - 4);  //assuming the max and min labels are as wide as the next tick (with an extra 4 pixels just in case)\n                            else // i==0, min position\n                                maxMinRange.push(scale(d) + 4);\n                        }\n                    });\n                // the g's wrapping each tick\n                g.selectAll('g').each(function(d, i) {\n                    if (scale(d) < maxMinRange[0] || scale(d) > maxMinRange[1]) {\n                        if (d > 1e-10 || d < -1e-10) // accounts for minor floating point errors... 
though could be problematic if the scale is EXTREMELY SMALL\n                            d3.select(this).remove();\n                        else\n                            d3.select(this).select('text').remove(); // Don't remove the ZERO line!!\n                    }\n                });\n            }\n\n            //Highlight zero tick line\n            g.selectAll('.tick')\n                .filter(function (d) {\n                    /*\n                    The filter needs to return only ticks at or near zero.\n                    Numbers like 0.00001 need to count as zero as well,\n                    and the arithmetic trick below solves that.\n                    */\n                    return !parseFloat(Math.round(d * 100000) / 1000000) && (d !== undefined)\n                }) \n                .classed('zero', true);\n            \n            //store old scales for use in transitions on update\n            scale0 = scale.copy();\n\n        });\n\n        renderWatch.renderEnd('axis immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.axis = axis;\n    chart.dispatch = dispatch;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        axisLabelDistance: {get: function(){return axisLabelDistance;}, set: function(_){axisLabelDistance=_;}},\n        staggerLabels:     {get: function(){return staggerLabels;}, set: function(_){staggerLabels=_;}},\n        rotateLabels:      {get: function(){return rotateLabels;}, set: function(_){rotateLabels=_;}},\n        rotateYLabel:      {get: function(){return rotateYLabel;}, set: function(_){rotateYLabel=_;}},\n        showMaxMin:        {get: function(){return showMaxMin;}, set: function(_){showMaxMin=_;}},\n        axisLabel:         {get: function(){return axisLabelText;}, set: function(_){axisLabelText=_;}},\n        height:            {get: function(){return height;}, set: function(_){height=_;}},\n        ticks:             {get: function(){return ticks;}, set: function(_){ticks=_;}},\n        width:             {get: function(){return width;}, set: function(_){width=_;}},\n        fontSize:          {get: function(){return fontSize;}, set: function(_){fontSize=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top !== undefined    ? _.top    : margin.top;\n            margin.right  = _.right !== undefined  ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left !== undefined   ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration=_;\n            renderWatch.reset(duration);\n        }},\n        scale: {get: function(){return scale;}, set: function(_){\n            scale = _;\n            axis.scale(scale);\n            isOrdinal = typeof scale.rangeBands === 'function';\n            nv.utils.inheritOptionsD3(chart, scale, ['domain', 'range', 'rangeBand', 'rangeBands']);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    nv.utils.inheritOptionsD3(chart, axis, ['orient', 'tickValues', 'tickSubdivide', 'tickSize', 'tickPadding', 'tickFormat']);\n    nv.utils.inheritOptionsD3(chart, scale, ['domain', 'range', 'rangeBand', 'rangeBands']);\n\n    return chart;\n};\n","nv.models.boxPlot = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0},\n        width = 960,\n        height = 500,\n        id = Math.floor(Math.random() * 10000), // Create semi-unique ID in case user doesn't select one\n        xScale = d3.scale.ordinal(),\n        yScale = d3.scale.linear(),\n        getX  = function(d) { return d.label }, // Default data model selectors.\n        getQ1 = function(d) { return d.values.Q1 },\n        getQ2 = function(d) { return d.values.Q2 },\n        getQ3 = function(d) { return d.values.Q3 },\n        getWl = function(d) { return d.values.whisker_low },\n        getWh = function(d) { return d.values.whisker_high },\n        getColor = function(d) { return d.color },\n        getOlItems  = function(d) { return d.values.outliers },\n        getOlValue = function(d, i, j) { return d },\n        getOlLabel = function(d, i, j) { return d },\n        getOlColor = function(d, i, j) { return undefined },\n        color = nv.utils.defaultColor(),\n        container = null,\n        xDomain, xRange,\n        yDomain, yRange,\n        dispatch = d3.dispatch('elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd'),\n        duration = 250,\n        maxBoxWidth = null;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var xScale0, yScale0;\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup Scales\n            xScale.domain(xDomain || data.map(function(d,i) { return getX(d,i); }))\n                .rangeBands(xRange || [0, availableWidth], 0.1);\n\n            // if we know yDomain, no need to calculate\n            var yData = []\n            if (!yDomain) {\n                // (y-range is based on quartiles, whiskers and outliers)\n                var values = [], yMin, yMax;\n                data.forEach(function (d, i) {\n                    var q1 = getQ1(d), q3 = getQ3(d), wl = getWl(d), wh = getWh(d);\n                    var olItems = getOlItems(d);\n                    if (olItems) {\n                        olItems.forEach(function (e, i) {\n                  
          values.push(getOlValue(e, i, undefined));\n                        });\n                    }\n                    if (wl) { values.push(wl) }\n                    if (q1) { values.push(q1) }\n                    if (q3) { values.push(q3) }\n                    if (wh) { values.push(wh) }\n                });\n                yMin = d3.min(values);\n                yMax = d3.max(values);\n                yData = [ yMin, yMax ] ;\n            }\n\n            yScale.domain(yDomain || yData);\n            yScale.range(yRange || [availableHeight, 0]);\n\n            //store old scales if they exist\n            xScale0 = xScale0 || xScale;\n            yScale0 = yScale0 || yScale.copy().range([yScale(0),yScale(0)]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap');\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var boxplots = wrap.selectAll('.nv-boxplot').data(function(d) { return d });\n            var boxEnter = boxplots.enter().append('g').style('stroke-opacity', 1e-6).style('fill-opacity', 1e-6);\n            boxplots\n                .attr('class', 'nv-boxplot')\n                .attr('transform', function(d,i,j) { return 'translate(' + (xScale(getX(d,i)) + xScale.rangeBand() * 0.05) + ', 0)'; })\n                .classed('hover', function(d) { return d.hover });\n            boxplots\n                .watchTransition(renderWatch, 'nv-boxplot: boxplots')\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', 0.75)\n                .delay(function(d,i) { return i * duration / data.length })\n                .attr('transform', function(d,i) {\n                    return 'translate(' + (xScale(getX(d,i)) + xScale.rangeBand() * 0.05) + ', 0)';\n                });\n            boxplots.exit().remove();\n\n            // ----- add the SVG elements for each boxPlot -----\n\n            // conditionally append whisker lines\n            boxEnter.each(function(d,i) {\n                var box = d3.select(this);\n                [getWl, getWh].forEach(function (f) {\n                    if (f(d) !== undefined && f(d) !== null) {\n                        var key = (f === getWl) ? 'low' : 'high';\n                        box.append('line')\n                          .style('stroke', getColor(d) || color(d,i))\n                          .attr('class', 'nv-boxplot-whisker nv-boxplot-' + key);\n                        box.append('line')\n                          .style('stroke', getColor(d) || color(d,i))\n                          .attr('class', 'nv-boxplot-tick nv-boxplot-' + key);\n                    }\n                });\n            });\n\n            var box_width = function() { return (maxBoxWidth === null ? xScale.rangeBand() * 0.9 : Math.min(75, xScale.rangeBand() * 0.9)); };\n            var box_left  = function() { return xScale.rangeBand() * 0.45 - box_width()/2; };\n            var box_right = function() { return xScale.rangeBand() * 0.45 + box_width()/2; };\n\n            // update whisker lines and ticks\n            [getWl, getWh].forEach(function (f) {\n                var key = (f === getWl) ? 'low' : 'high';\n                var endpoint = (f === getWl) ? 
getQ1 : getQ3;\n                boxplots.select('line.nv-boxplot-whisker.nv-boxplot-' + key)\n                  .watchTransition(renderWatch, 'nv-boxplot: boxplots')\n                    .attr('x1', xScale.rangeBand() * 0.45 )\n                    .attr('y1', function(d,i) { return yScale(f(d)); })\n                    .attr('x2', xScale.rangeBand() * 0.45 )\n                    .attr('y2', function(d,i) { return yScale(endpoint(d)); });\n                boxplots.select('line.nv-boxplot-tick.nv-boxplot-' + key)\n                  .watchTransition(renderWatch, 'nv-boxplot: boxplots')\n                    .attr('x1', box_left )\n                    .attr('y1', function(d,i) { return yScale(f(d)); })\n                    .attr('x2', box_right )\n                    .attr('y2', function(d,i) { return yScale(f(d)); });\n            });\n\n            [getWl, getWh].forEach(function (f) {\n                var key = (f === getWl) ? 'low' : 'high';\n                boxEnter.selectAll('.nv-boxplot-' + key)\n                  .on('mouseover', function(d,i,j) {\n                      d3.select(this).classed('hover', true);\n                      dispatch.elementMouseover({\n                          series: { key: f(d), color: getColor(d) || color(d,j) },\n                          e: d3.event\n                      });\n                  })\n                  .on('mouseout', function(d,i,j) {\n                      d3.select(this).classed('hover', false);\n                      dispatch.elementMouseout({\n                          series: { key: f(d), color: getColor(d) || color(d,j) },\n                          e: d3.event\n                      });\n                  })\n                  .on('mousemove', function(d,i) {\n                      dispatch.elementMousemove({e: d3.event});\n                  });\n            });\n\n            // boxes\n            boxEnter.append('rect')\n                .attr('class', 'nv-boxplot-box')\n                // tooltip events\n                .on('mouseover', function(d,i) {\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        key: getX(d),\n                        value: getX(d),\n                        series: [\n                            { key: 'Q3', value: getQ3(d), color: getColor(d) || color(d,i) },\n                            { key: 'Q2', value: getQ2(d), color: getColor(d) || color(d,i) },\n                            { key: 'Q1', value: getQ1(d), color: getColor(d) || color(d,i) }\n                        ],\n                        data: d,\n                        index: i,\n                        e: d3.event\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        key: getX(d),\n                        value: getX(d),\n                        series: [\n                            { key: 'Q3', value: getQ3(d), color: getColor(d) || color(d,i) },\n                            { key: 'Q2', value: getQ2(d), color: getColor(d) || color(d,i) },\n                            { key: 'Q1', value: getQ1(d), color: getColor(d) || color(d,i) }\n                        ],\n                        data: d,\n                        index: i,\n                        e: d3.event\n                    });\n                })\n                .on('mousemove', function(d,i) {\n                    
dispatch.elementMousemove({e: d3.event});\n                });\n\n            // box transitions\n            boxplots.select('rect.nv-boxplot-box')\n              .watchTransition(renderWatch, 'nv-boxplot: boxes')\n                .attr('y', function(d,i) { return yScale(getQ3(d)); })\n                .attr('width', box_width)\n                .attr('x', box_left )\n                .attr('height', function(d,i) { return Math.abs(yScale(getQ3(d)) - yScale(getQ1(d))) || 1 })\n                .style('fill', function(d,i) { return getColor(d) || color(d,i) })\n                .style('stroke', function(d,i) { return getColor(d) || color(d,i) });\n\n            // median line\n            boxEnter.append('line').attr('class', 'nv-boxplot-median');\n\n            boxplots.select('line.nv-boxplot-median')\n              .watchTransition(renderWatch, 'nv-boxplot: boxplots line')\n                .attr('x1', box_left)\n                .attr('y1', function(d,i) { return yScale(getQ2(d)); })\n                .attr('x2', box_right)\n                .attr('y2', function(d,i) { return yScale(getQ2(d)); });\n\n            // outliers\n            var outliers = boxplots.selectAll('.nv-boxplot-outlier').data(function(d) {\n                return getOlItems(d) || [];\n            });\n            outliers.enter().append('circle')\n                .style('fill', function(d,i,j) { return getOlColor(d,i,j) || color(d,j) })\n                .style('stroke', function(d,i,j) { return getOlColor(d,i,j) || color(d,j) })\n                .style('z-index', 9000)\n                .on('mouseover', function(d,i,j) {\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        series: { key: getOlLabel(d,i,j), color: getOlColor(d,i,j) || color(d,j) },\n                        e: d3.event\n                    });\n                })\n                .on('mouseout', function(d,i,j) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        series: { key: getOlLabel(d,i,j), color: getOlColor(d,i,j) || color(d,j) },\n                        e: d3.event\n                    });\n                })\n                .on('mousemove', function(d,i) {\n                    dispatch.elementMousemove({e: d3.event});\n                });\n            outliers.attr('class', 'nv-boxplot-outlier');\n            outliers\n              .watchTransition(renderWatch, 'nv-boxplot: nv-boxplot-outlier')\n                .attr('cx', xScale.rangeBand() * 0.45)\n                .attr('cy', function(d,i,j) { return yScale(getOlValue(d,i,j)); })\n                .attr('r', '3');\n            outliers.exit().remove();\n\n            //store old scales for use in transitions on update\n            xScale0 = xScale.copy();\n            yScale0 = yScale.copy();\n        });\n\n        renderWatch.renderEnd('nv-boxplot immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:       {get: function(){return width;}, set: function(_){width=_;}},\n        height:      {get: function(){return height;}, set: function(_){height=_;}},\n        
maxBoxWidth: {get: function(){return maxBoxWidth;}, set: function(_){maxBoxWidth=_;}},\n        x:           {get: function(){return getX;}, set: function(_){getX=_;}},\n        q1: {get: function(){return getQ1;}, set: function(_){getQ1=_;}},\n        q2: {get: function(){return getQ2;}, set: function(_){getQ2=_;}},\n        q3: {get: function(){return getQ3;}, set: function(_){getQ3=_;}},\n        wl: {get: function(){return getWl;}, set: function(_){getWl=_;}},\n        wh: {get: function(){return getWh;}, set: function(_){getWh=_;}},\n        itemColor:    {get: function(){return getColor;}, set: function(_){getColor=_;}},\n        outliers:     {get: function(){return getOlItems;}, set: function(_){getOlItems=_;}},\n        outlierValue: {get: function(){return getOlValue;}, set: function(_){getOlValue=_;}},\n        outlierLabel: {get: function(){return getOlLabel;}, set: function(_){getOlLabel=_;}},\n        outlierColor: {get: function(){return getOlColor;}, set: function(_){getOlColor=_;}},\n        xScale:  {get: function(){return xScale;}, set: function(_){xScale=_;}},\n        yScale:  {get: function(){return yScale;}, set: function(_){yScale=_;}},\n        xDomain: {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain: {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:  {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:  {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        id:          {get: function(){return id;}, set: function(_){id=_;}},\n        // rectClass: {get: function(){return rectClass;}, set: function(_){rectClass=_;}},\n        y: {\n            get: function() {\n                console.warn('BoxPlot \\'y\\' chart option is deprecated. Please use model overrides instead.');\n                return {};\n            },\n            set: function(_) {\n                console.warn('BoxPlot \\'y\\' chart option is deprecated. Please use model overrides instead.');\n            }\n        },\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","nv.models.boxPlotChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var boxplot = nv.models.boxPlot(),\n        xAxis = nv.models.axis(),\n        yAxis = nv.models.axis();\n\n    var margin = {top: 15, right: 10, bottom: 50, left: 60},\n        width = null,\n        height = null,\n        color = nv.utils.getColor(),\n        showXAxis = true,\n        showYAxis = true,\n        rightAlignYAxis = false,\n        staggerLabels = false,\n        tooltip = nv.models.tooltip(),\n        x, y,\n        noData = 'No Data Available.',\n        dispatch = d3.dispatch('beforeUpdate', 'renderEnd'),\n        duration = 250;\n\n    xAxis\n        .orient('bottom')\n        .showMaxMin(false)\n        .tickFormat(function(d) { return d })\n    ;\n    yAxis\n        .orient((rightAlignYAxis) ? 'right' : 'left')\n        .tickFormat(d3.format(',.1f'))\n    ;\n\n    tooltip.duration(0);\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(boxplot);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this), that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = (width  || parseInt(container.style('width')) || 960) - margin.left - margin.right;\n            var availableHeight = (height || parseInt(container.style('height')) || 400) - margin.top - margin.bottom;\n\n            chart.update = function() {\n                dispatch.beforeUpdate();\n                container.transition().duration(duration).call(chart);\n            };\n            chart.container = this;\n\n            // TODO still need to find a way to validate quartile data presence using boxPlot callbacks.\n            // Display No Data message if there's nothing to show. 
(quartiles required at minimum).\n            if (!data || !data.length) {\n                var noDataText = container.selectAll('.nv-noData').data([noData]);\n\n                noDataText.enter().append('text')\n                    .attr('class', 'nvd3 nv-noData')\n                    .attr('dy', '-.7em')\n                    .style('text-anchor', 'middle');\n\n                noDataText\n                    .attr('x', margin.left + availableWidth / 2)\n                    .attr('y', margin.top + availableHeight / 2)\n                    .text(function(d) { return d });\n\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = boxplot.xScale();\n            y = boxplot.yScale().clamp(true);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-boxPlotWithAxes').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-boxPlotWithAxes').append('g');\n            var defsEnter = gEnter.append('defs');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis')\n                .append('g').attr('class', 'nv-zeroLine')\n                .append('line');\n\n            gEnter.append('g').attr('class', 'nv-barsWrap');\n            g.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            if (rightAlignYAxis) {\n                g.select('.nv-y.nv-axis')\n                    .attr('transform', 'translate(' + availableWidth + ',0)');\n            }\n\n            // Main Chart Component(s)\n            boxplot.width(availableWidth).height(availableHeight);\n\n            var barsWrap = g.select('.nv-barsWrap')\n                .datum(data.filter(function(d) { return !d.disabled }))\n\n            barsWrap.transition().call(boxplot);\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-x-label-clip-' + boxplot.id())\n                .append('rect');\n\n            g.select('#nv-x-label-clip-' + boxplot.id() + ' rect')\n                .attr('width', x.rangeBand() * (staggerLabels ? 2 : 1))\n                .attr('height', 16)\n                .attr('x', -x.rangeBand() / (staggerLabels ? 1 : 2 ));\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    .ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n\n                g.select('.nv-x.nv-axis').attr('transform', 'translate(0,' + y.range()[0] + ')');\n                g.select('.nv-x.nv-axis').call(xAxis);\n\n                var xTicks = g.select('.nv-x.nv-axis').selectAll('g');\n                if (staggerLabels) {\n                    xTicks\n                        .selectAll('text')\n                        .attr('transform', function(d,i,j) { return 'translate(0,' + (j % 2 === 0 ? 
'5' : '17') + ')' })\n                }\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    .ticks( Math.floor(availableHeight/36) ) // can't use nv.utils.calcTicksY with Object data\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis').call(yAxis);\n            }\n\n            // Zero line\n            g.select('.nv-zeroLine line')\n                .attr('x1',0)\n                .attr('x2',availableWidth)\n                .attr('y1', y(0))\n                .attr('y2', y(0))\n            ;\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n        });\n\n        renderWatch.renderEnd('nv-boxplot chart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    boxplot.dispatch.on('elementMouseover.tooltip', function(evt) {\n        tooltip.data(evt).hidden(false);\n    });\n\n    boxplot.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.data(evt).hidden(true);\n    });\n\n    boxplot.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.boxplot = boxplot;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        staggerLabels: {get: function(){return staggerLabels;}, set: function(_){staggerLabels=_;}},\n        showXAxis: {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis: {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        tooltipContent:    {get: function(){return tooltip;}, set: function(_){tooltip=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            boxplot.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            boxplot.color(color);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( (_) ? 'right' : 'left');\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, boxplot);\n    nv.utils.initOptions(chart);\n\n    return chart;\n}\n","\n// Chart design based on the recommendations of Stephen Few. Implementation\n// based on the work of Clint Ivy, Jamie Love, and Jason Davies.\n// http://projects.instantcognition.com/protovis/bulletchart/\n\nnv.models.bullet = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , orient = 'left' // TODO top & bottom\n        , reverse = false\n        , ranges = function(d) { return d.ranges }\n        , markers = function(d) { return d.markers ? d.markers : [] }\n        , markerLines = function(d) { return d.markerLines ? d.markerLines : [0] }\n        , measures = function(d) { return d.measures }\n        , rangeLabels = function(d) { return d.rangeLabels ? d.rangeLabels : [] }\n        , markerLabels = function(d) { return d.markerLabels ? d.markerLabels : []  }\n        , markerLineLabels = function(d) { return d.markerLineLabels ? d.markerLineLabels : []  }\n        , measureLabels = function(d) { return d.measureLabels ? d.measureLabels : []  }\n        , forceX = [0] // List of numbers to Force into the X scale (ie. 
0, or a max / min, etc.)\n        , width = 380\n        , height = 30\n        , container = null\n        , tickFormat = null\n        , color = nv.utils.getColor(['#1f77b4'])\n        , dispatch = d3.dispatch('elementMouseover', 'elementMouseout', 'elementMousemove')\n        , defaultRangeLabels = [\"Maximum\", \"Mean\", \"Minimum\"]\n        , legacyRangeClassNames = [\"Max\", \"Avg\", \"Min\"]\n        , duration = 1000\n        ;\n\n    function sortLabels(labels, values){\n        var lz = labels.slice();\n        labels.sort(function(a, b){\n            var iA = lz.indexOf(a);\n            var iB = lz.indexOf(b);\n            return d3.descending(values[iA], values[iB]);\n        });\n    };\n\n    function chart(selection) {\n        selection.each(function(d, i) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            var rangez = ranges.call(this, d, i).slice(),\n                markerz = markers.call(this, d, i).slice(),\n                markerLinez = markerLines.call(this, d, i).slice(),\n                measurez = measures.call(this, d, i).slice(),\n                rangeLabelz = rangeLabels.call(this, d, i).slice(),\n                markerLabelz = markerLabels.call(this, d, i).slice(),\n                markerLineLabelz = markerLineLabels.call(this, d, i).slice(),\n                measureLabelz = measureLabels.call(this, d, i).slice();\n\n            // Sort labels according to their sorted values\n            sortLabels(rangeLabelz, rangez);\n            sortLabels(markerLabelz, markerz);\n            sortLabels(markerLineLabelz, markerLinez);\n            sortLabels(measureLabelz, measurez);\n\n            // sort values descending\n            rangez.sort(d3.descending);\n            markerz.sort(d3.descending);\n            markerLinez.sort(d3.descending);\n            measurez.sort(d3.descending);\n\n            // Setup Scales\n            // Compute the new x-scale.\n            var x1 = d3.scale.linear()\n                .domain( d3.extent(d3.merge([forceX, rangez])) )\n                .range(reverse ? 
[availableWidth, 0] : [0, availableWidth]);\n\n            // Retrieve the old x-scale, if this is an update.\n            var x0 = this.__chart__ || d3.scale.linear()\n                .domain([0, Infinity])\n                .range(x1.range());\n\n            // Stash the new scale.\n            this.__chart__ = x1;\n\n            var rangeMin = d3.min(rangez), //rangez[2]\n                rangeMax = d3.max(rangez), //rangez[0]\n                rangeAvg = rangez[1];\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-bullet').data([d]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-bullet');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            for(var i=0,il=rangez.length; i<il; i++){\n                var rangeClassNames = 'nv-range nv-range'+i;\n                if(i <= 2){\n                    rangeClassNames = rangeClassNames + ' nv-range'+legacyRangeClassNames[i];\n                }\n                gEnter.append('rect').attr('class', rangeClassNames);\n            }\n\n            gEnter.append('rect').attr('class', 'nv-measure');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var w0 = function(d) { return Math.abs(x0(d) - x0(0)) }, // TODO: could optimize by precalculating x0(0) and x1(0)\n                w1 = function(d) { return Math.abs(x1(d) - x1(0)) };\n            var xp0 = function(d) { return d < 0 ? x0(d) : x0(0) },\n                xp1 = function(d) { return d < 0 ? x1(d) : x1(0) };\n\n            for(var i=0,il=rangez.length; i<il; i++){\n                var range = rangez[i];\n                g.select('rect.nv-range'+i)\n                    .datum(range)\n                    .attr('height', availableHeight)\n                    .transition()\n                    .duration(duration)\n                    .attr('width', w1(range))\n                    .attr('x', xp1(range))\n            }\n\n            g.select('rect.nv-measure')\n                .style('fill', color)\n                .attr('height', availableHeight / 3)\n                .attr('y', availableHeight / 3)\n                .on('mouseover', function() {\n                    dispatch.elementMouseover({\n                        value: measurez[0],\n                        label: measureLabelz[0] || 'Current',\n                        color: d3.select(this).style(\"fill\")\n                    })\n                })\n                .on('mousemove', function() {\n                    dispatch.elementMousemove({\n                        value: measurez[0],\n                        label: measureLabelz[0] || 'Current',\n                        color: d3.select(this).style(\"fill\")\n                    })\n                })\n                .on('mouseout', function() {\n                    dispatch.elementMouseout({\n                        value: measurez[0],\n                        label: measureLabelz[0] || 'Current',\n                        color: d3.select(this).style(\"fill\")\n                    })\n                })\n                .transition()\n                .duration(duration)\n                .attr('width', measurez < 0 ?\n                    x1(0) - x1(measurez[0])\n                    : x1(measurez[0]) - x1(0))\n                .attr('x', xp1(measurez));\n\n            var h3 =  availableHeight / 6;\n\n            var markerData = markerz.map( function(marker, index) {\n                
return {value: marker, label: markerLabelz[index]}\n            });\n            gEnter\n              .selectAll(\"path.nv-markerTriangle\")\n              .data(markerData)\n              .enter()\n              .append('path')\n              .attr('class', 'nv-markerTriangle')\n              .attr('d', 'M0,' + h3 + 'L' + h3 + ',' + (-h3) + ' ' + (-h3) + ',' + (-h3) + 'Z')\n              .on('mouseover', function(d) {\n                dispatch.elementMouseover({\n                  value: d.value,\n                  label: d.label || 'Previous',\n                  color: d3.select(this).style(\"fill\"),\n                  pos: [x1(d.value), availableHeight/2]\n                })\n\n              })\n              .on('mousemove', function(d) {\n                  dispatch.elementMousemove({\n                      value: d.value,\n                      label: d.label || 'Previous',\n                      color: d3.select(this).style(\"fill\")\n                  })\n              })\n              .on('mouseout', function(d, i) {\n                  dispatch.elementMouseout({\n                      value: d.value,\n                      label: d.label || 'Previous',\n                      color: d3.select(this).style(\"fill\")\n                  })\n              });\n\n            g.selectAll(\"path.nv-markerTriangle\")\n              .data(markerData)\n              .transition()\n              .duration(duration)\n              .attr('transform', function(d) { return 'translate(' + x1(d.value) + ',' + (availableHeight / 2) + ')' });\n\n            var markerLinesData = markerLinez.map( function(marker, index) {\n                return {value: marker, label: markerLineLabelz[index]}\n            });\n            gEnter\n              .selectAll(\"line.nv-markerLine\")\n              .data(markerLinesData)\n              .enter()\n              .append('line')\n              .attr('cursor', '')\n              .attr('class', 'nv-markerLine')\n              .attr('x1', function(d) { return x1(d.value) })\n              .attr('y1', '2')\n              .attr('x2', function(d) { return x1(d.value) })\n              .attr('y2', availableHeight - 2)\n              .on('mouseover', function(d) {\n                dispatch.elementMouseover({\n                  value: d.value,\n                  label: d.label || 'Previous',\n                  color: d3.select(this).style(\"fill\"),\n                  pos: [x1(d.value), availableHeight/2]\n                })\n\n              })\n              .on('mousemove', function(d) {\n                  dispatch.elementMousemove({\n                      value: d.value,\n                      label: d.label || 'Previous',\n                      color: d3.select(this).style(\"fill\")\n                  })\n              })\n              .on('mouseout', function(d, i) {\n                  dispatch.elementMouseout({\n                      value: d.value,\n                      label: d.label || 'Previous',\n                      color: d3.select(this).style(\"fill\")\n                  })\n              });\n\n            g.selectAll(\"line.nv-markerLine\")\n              .data(markerLinesData)\n              .transition()\n              .duration(duration)\n              .attr('x1', function(d) { return x1(d.value) })\n              .attr('x2', function(d) { return x1(d.value) });\n\n            wrap.selectAll('.nv-range')\n                .on('mouseover', function(d,i) {\n                    var label = rangeLabelz[i] || defaultRangeLabels[i];\n                    
dispatch.elementMouseover({\n                        value: d,\n                        label: label,\n                        color: d3.select(this).style(\"fill\")\n                    })\n                })\n                .on('mousemove', function() {\n                    dispatch.elementMousemove({\n                        value: measurez[0],\n                        label: measureLabelz[0] || 'Previous',\n                        color: d3.select(this).style(\"fill\")\n                    })\n                })\n                .on('mouseout', function(d,i) {\n                    var label = rangeLabelz[i] || defaultRangeLabels[i];\n                    dispatch.elementMouseout({\n                        value: d,\n                        label: label,\n                        color: d3.select(this).style(\"fill\")\n                    })\n                });\n        });\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        ranges:      {get: function(){return ranges;}, set: function(_){ranges=_;}}, // ranges (bad, satisfactory, good)\n        markers:     {get: function(){return markers;}, set: function(_){markers=_;}}, // markers (previous, goal)\n        measures: {get: function(){return measures;}, set: function(_){measures=_;}}, // measures (actual, forecast)\n        forceX:      {get: function(){return forceX;}, set: function(_){forceX=_;}},\n        width:    {get: function(){return width;}, set: function(_){width=_;}},\n        height:    {get: function(){return height;}, set: function(_){height=_;}},\n        tickFormat:    {get: function(){return tickFormat;}, set: function(_){tickFormat=_;}},\n        duration:    {get: function(){return duration;}, set: function(_){duration=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        orient: {get: function(){return orient;}, set: function(_){ // left, right, top, bottom\n            orient = _;\n            reverse = orient == 'right' || orient == 'bottom';\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n\n\n","\n// Chart design based on the recommendations of Stephen Few. 
Implementation\n// based on the work of Clint Ivy, Jamie Love, and Jason Davies.\n// http://projects.instantcognition.com/protovis/bulletchart/\nnv.models.bulletChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var bullet = nv.models.bullet();\n    var tooltip = nv.models.tooltip();\n\n    var orient = 'left' // TODO top & bottom\n        , reverse = false\n        , margin = {top: 5, right: 40, bottom: 20, left: 120}\n        , ranges = function(d) { return d.ranges }\n        , markers = function(d) { return d.markers ? d.markers : [] }\n        , measures = function(d) { return d.measures }\n        , width = null\n        , height = 55\n        , tickFormat = null\n        , ticks = null\n        , noData = null\n        , dispatch = d3.dispatch()\n        ;\n\n    tooltip\n        .duration(0)\n        .headerEnabled(false);\n\n    function chart(selection) {\n        selection.each(function(d, i) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = height - margin.top - margin.bottom,\n                that = this;\n\n            chart.update = function() { chart(selection) };\n            chart.container = this;\n\n            // Display No Data message if there's nothing to show.\n            if (!d || !ranges.call(this, d, i)) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            var rangez = ranges.call(this, d, i).slice().sort(d3.descending),\n                markerz = markers.call(this, d, i).slice().sort(d3.descending),\n                measurez = measures.call(this, d, i).slice().sort(d3.descending);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-bulletChart').data([d]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-bulletChart');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-bulletWrap');\n            gEnter.append('g').attr('class', 'nv-titles');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Compute the new x-scale.\n            var x1 = d3.scale.linear()\n                .domain([0, Math.max(rangez[0], (markerz[0] || 0), measurez[0])])  // TODO: need to allow forceX and forceY, and xDomain, yDomain\n                .range(reverse ? 
[availableWidth, 0] : [0, availableWidth]);\n\n            // Retrieve the old x-scale, if this is an update.\n            var x0 = this.__chart__ || d3.scale.linear()\n                .domain([0, Infinity])\n                .range(x1.range());\n\n            // Stash the new scale.\n            this.__chart__ = x1;\n\n            var w0 = function(d) { return Math.abs(x0(d) - x0(0)) }, // TODO: could optimize by precalculating x0(0) and x1(0)\n                w1 = function(d) { return Math.abs(x1(d) - x1(0)) };\n\n            var title = gEnter.select('.nv-titles').append('g')\n                .attr('text-anchor', 'end')\n                .attr('transform', 'translate(-6,' + (height - margin.top - margin.bottom) / 2 + ')');\n            title.append('text')\n                .attr('class', 'nv-title')\n                .text(function(d) { return d.title; });\n\n            title.append('text')\n                .attr('class', 'nv-subtitle')\n                .attr('dy', '1em')\n                .text(function(d) { return d.subtitle; });\n\n            bullet\n                .width(availableWidth)\n                .height(availableHeight);\n\n            var bulletWrap = g.select('.nv-bulletWrap');\n            d3.transition(bulletWrap).call(bullet);\n\n            // Compute the tick format.\n            var format = tickFormat || x1.tickFormat( availableWidth / 100 );\n\n            // Update the tick groups.\n            var tick = g.selectAll('g.nv-tick')\n                .data(x1.ticks( ticks ? ticks : (availableWidth / 50) ), function(d) {\n                    return this.textContent || format(d);\n                });\n\n            // Initialize the ticks with the old scale, x0.\n            var tickEnter = tick.enter().append('g')\n                .attr('class', 'nv-tick')\n                .attr('transform', function(d) { return 'translate(' + x0(d) + ',0)' })\n                .style('opacity', 1e-6);\n\n            tickEnter.append('line')\n                .attr('y1', availableHeight)\n                .attr('y2', availableHeight * 7 / 6);\n\n            tickEnter.append('text')\n                .attr('text-anchor', 'middle')\n                .attr('dy', '1em')\n                .attr('y', availableHeight * 7 / 6)\n                .text(format);\n\n            // Transition the updating ticks to the new scale, x1.\n            var tickUpdate = d3.transition(tick)\n                .transition()\n                .duration(bullet.duration())\n                .attr('transform', function(d) { return 'translate(' + x1(d) + ',0)' })\n                .style('opacity', 1);\n\n            tickUpdate.select('line')\n                .attr('y1', availableHeight)\n                .attr('y2', availableHeight * 7 / 6);\n\n            tickUpdate.select('text')\n                .attr('y', availableHeight * 7 / 6);\n\n            // Transition the exiting ticks to the new scale, x1.\n            d3.transition(tick.exit())\n                .transition()\n                .duration(bullet.duration())\n                .attr('transform', function(d) { return 'translate(' + x1(d) + ',0)' })\n                .style('opacity', 1e-6)\n                .remove();\n        });\n\n        d3.timer.flush();\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    bullet.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt['series'] 
= {\n            key: evt.label,\n            value: evt.value,\n            color: evt.color\n        };\n        tooltip.data(evt).hidden(false);\n    });\n\n    bullet.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    bullet.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.bullet = bullet;\n    chart.dispatch = dispatch;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        ranges:      {get: function(){return ranges;}, set: function(_){ranges=_;}}, // ranges (bad, satisfactory, good)\n        markers:     {get: function(){return markers;}, set: function(_){markers=_;}}, // markers (previous, goal)\n        measures: {get: function(){return measures;}, set: function(_){measures=_;}}, // measures (actual, forecast)\n        width:    {get: function(){return width;}, set: function(_){width=_;}},\n        height:    {get: function(){return height;}, set: function(_){height=_;}},\n        tickFormat:    {get: function(){return tickFormat;}, set: function(_){tickFormat=_;}},\n        ticks:    {get: function(){return ticks;}, set: function(_){ticks=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        orient: {get: function(){return orient;}, set: function(_){ // left, right, top, bottom\n            orient = _;\n            reverse = orient == 'right' || orient == 'bottom';\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, bullet);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n\n\n","\nnv.models.candlestickBar = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = null\n        , height = null\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container\n        , x = d3.scale.linear()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , getOpen = function(d) { return d.open }\n        , getClose = function(d) { return d.close }\n        , getHigh = function(d) { return d.high }\n        , getLow = function(d) { return d.low }\n        , forceX = []\n        , forceY = []\n        , padData     = false // If true, adds half a data points width to front and back, for lining up a line chart with a bar chart\n        , clipEdge = true\n        , color = nv.utils.defaultColor()\n        , interactive = false\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , dispatch = d3.dispatch('stateChange', 'changeState', 'renderEnd', 'chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    function chart(selection) {\n        selection.each(function(data) {\n            container = d3.select(this);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            nv.utils.initSVG(container);\n\n            // Width of the candlestick bars.\n            var barWidth = (availableWidth / data[0].values.length) * .45;\n\n            // Setup Scales\n            x.domain(xDomain || d3.extent(data[0].values.map(getX).concat(forceX) ));\n\n            if (padData)\n                x.range(xRange || [availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);\n            else\n                x.range(xRange || [5 + barWidth / 2, availableWidth - barWidth / 2 - 5]);\n\n            y.domain(yDomain || [\n                    d3.min(data[0].values.map(getLow).concat(forceY)),\n                    d3.max(data[0].values.map(getHigh).concat(forceY))\n                ]\n            ).range(yRange || [availableHeight, 0]);\n\n            // If scale's domain don't have a range, slightly adjust to make one... 
so a chart can show a single data point\n            if (x.domain()[0] === x.domain()[1])\n                x.domain()[0] ?\n                    x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])\n                    : x.domain([-1,1]);\n\n            if (y.domain()[0] === y.domain()[1])\n                y.domain()[0] ?\n                    y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])\n                    : y.domain([-1,1]);\n\n            // Setup containers and skeleton of chart\n            var wrap = d3.select(this).selectAll('g.nv-wrap.nv-candlestickBar').data([data[0].values]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-candlestickBar');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-ticks');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            container\n                .on('click', function(d,i) {\n                    dispatch.chartClick({\n                        data: d,\n                        index: i,\n                        pos: d3.event,\n                        id: id\n                    });\n                });\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-chart-clip-path-' + id)\n                .append('rect');\n\n            wrap.select('#nv-chart-clip-path-' + id + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            g   .attr('clip-path', clipEdge ? 'url(#nv-chart-clip-path-' + id + ')' : '');\n\n            var ticks = wrap.select('.nv-ticks').selectAll('.nv-tick')\n                .data(function(d) { return d });\n            ticks.exit().remove();\n\n            var tickGroups = ticks.enter().append('g');\n\n            // The colors are currently controlled by CSS.\n            ticks\n                .attr('class', function(d, i, j) { return (getOpen(d, i) > getClose(d, i) ? 'nv-tick negative' : 'nv-tick positive') + ' nv-tick-' + j + '-' + i});\n\n            var lines = tickGroups.append('line')\n                .attr('class', 'nv-candlestick-lines')\n                .attr('transform', function(d, i) { return 'translate(' + x(getX(d, i)) + ',0)'; })\n                .attr('x1', 0)\n                .attr('y1', function(d, i) { return y(getHigh(d, i)); })\n                .attr('x2', 0)\n                .attr('y2', function(d, i) { return y(getLow(d, i)); });\n\n            var rects = tickGroups.append('rect')\n                .attr('class', 'nv-candlestick-rects nv-bars')\n                .attr('transform', function(d, i) {\n                    return 'translate(' + (x(getX(d, i)) - barWidth/2) + ','\n                    + (y(getY(d, i)) - (getOpen(d, i) > getClose(d, i) ? (y(getClose(d, i)) - y(getOpen(d, i))) : 0))\n                    + ')';\n                })\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('width', barWidth)\n                .attr('height', function(d, i) {\n                    var open = getOpen(d, i);\n                    var close = getClose(d, i);\n                    return open > close ? 
y(close) - y(open) : y(open) - y(close);\n                });\n\n            ticks.select('.nv-candlestick-lines').transition()\n                .attr('transform', function(d, i) { return 'translate(' + x(getX(d, i)) + ',0)'; })\n                .attr('x1', 0)\n                .attr('y1', function(d, i) { return y(getHigh(d, i)); })\n                .attr('x2', 0)\n                .attr('y2', function(d, i) { return y(getLow(d, i)); });\n\n            ticks.select('.nv-candlestick-rects').transition()\n                .attr('transform', function(d, i) {\n                    return 'translate(' + (x(getX(d, i)) - barWidth/2) + ','\n                    + (y(getY(d, i)) - (getOpen(d, i) > getClose(d, i) ? (y(getClose(d, i)) - y(getOpen(d, i))) : 0))\n                    + ')';\n                })\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('width', barWidth)\n                .attr('height', function(d, i) {\n                    var open = getOpen(d, i);\n                    var close = getClose(d, i);\n                    return open > close ? y(close) - y(open) : y(open) - y(close);\n                });\n        });\n\n        return chart;\n    }\n\n\n    //Create methods to allow outside functions to highlight a specific bar.\n    chart.highlightPoint = function(pointIndex, isHoverOver) {\n        chart.clearHighlights();\n        container.select(\".nv-candlestickBar .nv-tick-0-\" + pointIndex)\n            .classed(\"hover\", isHoverOver)\n        ;\n    };\n\n    chart.clearHighlights = function() {\n        container.select(\".nv-candlestickBar .nv-tick.hover\")\n            .classed(\"hover\", false)\n        ;\n    };\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:    {get: function(){return width;}, set: function(_){width=_;}},\n        height:   {get: function(){return height;}, set: function(_){height=_;}},\n        xScale:   {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:   {get: function(){return y;}, set: function(_){y=_;}},\n        xDomain:  {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain:  {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:   {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:   {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        forceX:   {get: function(){return forceX;}, set: function(_){forceX=_;}},\n        forceY:   {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        padData:  {get: function(){return padData;}, set: function(_){padData=_;}},\n        clipEdge: {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        id:       {get: function(){return id;}, set: function(_){id=_;}},\n        interactive: {get: function(){return interactive;}, set: function(_){interactive=_;}},\n\n        x:     {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:     {get: function(){return getY;}, set: function(_){getY=_;}},\n        open:  {get: function(){return getOpen();}, set: function(_){getOpen=_;}},\n        close: {get: function(){return getClose();}, set: function(_){getClose=_;}},\n        high:  
{get: function(){return getHigh;}, set: function(_){getHigh=_;}},\n        low:   {get: function(){return getLow;}, set: function(_){getLow=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    != undefined ? _.top    : margin.top;\n            margin.right  = _.right  != undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom != undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   != undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","\nnv.models.cumulativeLineChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var lines = nv.models.line()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , legend = nv.models.legend()\n        , controls = nv.models.legend()\n        , interactiveLayer = nv.interactiveGuideline()\n        , tooltip = nv.models.tooltip()\n        ;\n\n    var margin = {top: 30, right: 30, bottom: 50, left: 60}\n        , marginTop = null\n        , color = nv.utils.defaultColor()\n        , width = null\n        , height = null\n        , showLegend = true\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , showControls = true\n        , useInteractiveGuideline = false\n        , rescaleY = true\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , id = lines.id()\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , average = function(d) { return d.average }\n        , dispatch = d3.dispatch('stateChange', 'changeState', 'renderEnd')\n        , transitionDuration = 250\n        , duration = 250\n        , noErrorCheck = false  //if set to TRUE, will bypass an error check in the indexify function.\n        ;\n\n    state.index = 0;\n    state.rescaleY = rescaleY;\n\n    xAxis.orient('bottom').tickPadding(7);\n    yAxis.orient((rightAlignYAxis) ? 
'right' : 'left');\n\n    tooltip.valueFormatter(function(d, i) {\n        return yAxis.tickFormat()(d, i);\n    }).headerFormatter(function(d, i) {\n        return xAxis.tickFormat()(d, i);\n    });\n\n    controls.updateState(false);\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var dx = d3.scale.linear()\n        , index = {i: 0, x: 0}\n        , renderWatch = nv.utils.renderWatch(dispatch, duration)\n        ;\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled }),\n                index: index.i,\n                rescaleY: rescaleY\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.index !== undefined)\n                index.i = state.index;\n            if (state.rescaleY !== undefined)\n                rescaleY = state.rescaleY;\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(lines);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n            container.classed('nv-chart-' + id, true);\n            var that = this;\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() {\n                if (duration === 0)\n                    container.call(chart);\n                else\n                    container.transition().duration(duration).call(chart)\n            };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disableddisabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            var indexDrag = d3.behavior.drag()\n                .on('dragstart', dragStart)\n                .on('drag', dragMove)\n                .on('dragend', dragEnd);\n\n\n            function dragStart(d,i) {\n                d3.select(chart.container)\n                    .style('cursor', 'ew-resize');\n            }\n\n            function dragMove(d,i) {\n                index.x = d3.event.x;\n                index.i = Math.round(dx.invert(index.x));\n                updateZero();\n            }\n\n            function dragEnd(d,i) {\n                d3.select(chart.container)\n                    .style('cursor', 'auto');\n\n                // update state and send stateChange with new index\n                state.index = index.i;\n                
dispatch.stateChange(state);\n            }\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = lines.xScale();\n            y = lines.yScale();\n\n            if (!rescaleY) {\n                var seriesDomains = data\n                    .filter(function(series) { return !series.disabled })\n                    .map(function(series,i) {\n                        var initialDomain = d3.extent(series.values, lines.y());\n\n                        //account for series being disabled when losing 95% or more\n                        if (initialDomain[0] < -.95) initialDomain[0] = -.95;\n\n                        return [\n                                (initialDomain[0] - initialDomain[1]) / (1 + initialDomain[1]),\n                                (initialDomain[1] - initialDomain[0]) / (1 + initialDomain[0])\n                        ];\n                    });\n\n                var completeDomain = [\n                    d3.min(seriesDomains, function(d) { return d[0] }),\n                    d3.max(seriesDomains, function(d) { return d[1] })\n                ];\n\n                lines.yDomain(completeDomain);\n            } else {\n                lines.yDomain(null);\n            }\n\n            dx.domain([0, data[0].values.length - 1]) //Assumes all series have same length\n                .range([0, availableWidth])\n                .clamp(true);\n\n            var data = indexify(index.i, data);\n\n            // Setup containers and skeleton of chart\n            var interactivePointerEvents = (useInteractiveGuideline) ? 
\"none\" : \"all\";\n            var wrap = container.selectAll('g.nv-wrap.nv-cumulativeLine').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-cumulativeLine').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-interactive');\n            gEnter.append('g').attr('class', 'nv-x nv-axis').style(\"pointer-events\",\"none\");\n            gEnter.append('g').attr('class', 'nv-y nv-axis');\n            gEnter.append('g').attr('class', 'nv-background');\n            gEnter.append('g').attr('class', 'nv-linesWrap').style(\"pointer-events\",interactivePointerEvents);\n            gEnter.append('g').attr('class', 'nv-avgLinesWrap').style(\"pointer-events\",\"none\");\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n            gEnter.append('g').attr('class', 'nv-controlsWrap');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth);\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                g.select('.nv-legendWrap')\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n            }\n\n            // Controls\n            if (!showControls) {\n                 g.select('.nv-controlsWrap').selectAll('*').remove();\n            } else {\n                var controlsData = [\n                    { key: 'Re-scale y-axis', disabled: !rescaleY }\n                ];\n\n                controls\n                    .width(140)\n                    .color(['#444', '#444', '#444'])\n                    .rightAlign(false)\n                    .margin({top: 5, right: 0, bottom: 5, left: 20})\n                ;\n\n                g.select('.nv-controlsWrap')\n                    .datum(controlsData)\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n                    .call(controls);\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            // Show error if series goes below 100%\n            var tempDisabled = data.filter(function(d) { return d.tempDisabled });\n\n            wrap.select('.tempDisabled').remove(); //clean-up and prevent duplicates\n            if (tempDisabled.length) {\n                wrap.append('text').attr('class', 'tempDisabled')\n                    .attr('x', availableWidth / 2)\n                    .attr('y', '-.71em')\n                    .style('text-anchor', 'end')\n                    .text(tempDisabled.map(function(d) { return d.key }).join(', ') + ' values cannot be calculated for this time period.');\n            }\n\n            //Set up interactive layer\n            if (useInteractiveGuideline) {\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left:margin.left,top:margin.top})\n                    
.svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n\n            gEnter.select('.nv-background')\n                .append('rect');\n\n            g.select('.nv-background rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            lines\n                //.x(function(d) { return d.x })\n                .y(function(d) { return d.display.y })\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled && !data[i].tempDisabled; }));\n\n            var linesWrap = g.select('.nv-linesWrap')\n                .datum(data.filter(function(d) { return  !d.disabled && !d.tempDisabled }));\n\n            linesWrap.call(lines);\n\n            //Store a series index number in the data array.\n            data.forEach(function(d,i) {\n                d.seriesIndex = i;\n            });\n\n            var avgLineData = data.filter(function(d) {\n                return !d.disabled && !!average(d);\n            });\n\n            var avgLines = g.select(\".nv-avgLinesWrap\").selectAll(\"line\")\n                .data(avgLineData, function(d) { return d.key; });\n\n            var getAvgLineY = function(d) {\n                //If average lines go off the svg element, clamp them to the svg bounds.\n                var yVal = y(average(d));\n                if (yVal < 0) return 0;\n                if (yVal > availableHeight) return availableHeight;\n                return yVal;\n            };\n\n            avgLines.enter()\n                .append('line')\n                .style('stroke-width',2)\n                .style('stroke-dasharray','10,10')\n                .style('stroke',function (d,i) {\n                    return lines.color()(d,d.seriesIndex);\n                })\n                .attr('x1',0)\n                .attr('x2',availableWidth)\n                .attr('y1', getAvgLineY)\n                .attr('y2', getAvgLineY);\n\n            avgLines\n                .style('stroke-opacity',function(d){\n                    //If average lines go offscreen, make them transparent\n                    var yVal = y(average(d));\n                    if (yVal < 0 || yVal > availableHeight) return 0;\n                    return 1;\n                })\n                .attr('x1',0)\n                .attr('x2',availableWidth)\n                .attr('y1', getAvgLineY)\n                .attr('y2', getAvgLineY);\n\n            avgLines.exit().remove();\n\n            //Create index line\n            var indexLine = linesWrap.selectAll('.nv-indexLine')\n                .data([index]);\n            indexLine.enter().append('rect').attr('class', 'nv-indexLine')\n                .attr('width', 3)\n                .attr('x', -2)\n                .attr('fill', 'red')\n                .attr('fill-opacity', .5)\n                .style(\"pointer-events\",\"all\")\n                .call(indexDrag);\n\n            indexLine\n                .attr('transform', function(d) { return 'translate(' + dx(d.i) + ',0)' })\n                .attr('height', availableHeight);\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/70, data) )\n                    
.tickSize(-availableHeight, 0);\n\n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')');\n                g.select('.nv-x.nv-axis')\n                    .call(xAxis);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis')\n                    .call(yAxis);\n            }\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            function updateZero() {\n                indexLine\n                    .data([index]);\n\n                //When dragging the index line, turn off line transitions.\n                // Then turn them back on when done dragging.\n                var oldDuration = chart.duration();\n                chart.duration(0);\n                chart.update();\n                chart.duration(oldDuration);\n            }\n\n            g.select('.nv-background rect')\n                .on('click', function() {\n                    index.x = d3.mouse(this)[0];\n                    index.i = Math.round(dx.invert(index.x));\n\n                    // update state and send stateChange with new index\n                    state.index = index.i;\n                    dispatch.stateChange(state);\n\n                    updateZero();\n                });\n\n            lines.dispatch.on('elementClick', function(e) {\n                index.i = e.pointIndex;\n                index.x = dx(index.i);\n\n                // update state and send stateChange with new index\n                state.index = index.i;\n                dispatch.stateChange(state);\n\n                updateZero();\n            });\n\n            controls.dispatch.on('legendClick', function(d,i) {\n                d.disabled = !d.disabled;\n                rescaleY = !d.disabled;\n\n                state.rescaleY = rescaleY;\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                lines.clearHighlights();\n                var singlePoint, pointIndex, pointXLocation, allData = [];\n\n                data\n                    .filter(function(series, i) {\n                        series.seriesIndex = i;\n                        return !series.disabled;\n                    })\n                    .forEach(function(series,i) {\n                        pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());\n                        lines.highlightPoint(i, pointIndex, true);\n                        var point = series.values[pointIndex];\n                        if (typeof point === 'undefined') return;\n                        if (typeof singlePoint === 'undefined') singlePoint = point;\n                        if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));\n                        allData.push({\n                  
          key: series.key,\n                            value: chart.y()(point, pointIndex),\n                            color: color(series,series.seriesIndex)\n                        });\n                    });\n\n                //Highlight the tooltip entry based on which point the mouse is closest to.\n                if (allData.length > 2) {\n                    var yValue = chart.yScale().invert(e.mouseY);\n                    var domainExtent = Math.abs(chart.yScale().domain()[0] - chart.yScale().domain()[1]);\n                    var threshold = 0.03 * domainExtent;\n                    var indexToHighlight = nv.nearestValueIndex(allData.map(function(d){return d.value}),yValue,threshold);\n                    if (indexToHighlight !== null)\n                        allData[indexToHighlight].highlight = true;\n                }\n\n                var xValue = xAxis.tickFormat()(chart.x()(singlePoint,pointIndex), pointIndex);\n                interactiveLayer.tooltip\n                    .valueFormatter(function(d,i) {\n                        return yAxis.tickFormat()(d);\n                    })\n                    .data(\n                    {\n                        value: xValue,\n                        series: allData\n                    }\n                )();\n\n                interactiveLayer.renderGuideLine(pointXLocation);\n            });\n\n            interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                lines.clearHighlights();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n\n                    state.disabled = e.disabled;\n                }\n\n                if (typeof e.index !== 'undefined') {\n                    index.i = e.index;\n                    index.x = dx(index.i);\n\n                    state.index = e.index;\n\n                    indexLine\n                        .data([index]);\n                }\n\n                if (typeof e.rescaleY !== 'undefined') {\n                    rescaleY = e.rescaleY;\n                }\n\n                chart.update();\n            });\n\n        });\n\n        renderWatch.renderEnd('cumulativeLineChart immediate');\n\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    lines.dispatch.on('elementMouseover.tooltip', function(evt) {\n        var point = {\n            x: chart.x()(evt.point),\n            y: chart.y()(evt.point),\n            color: evt.point.color\n        };\n        evt.point = point;\n        tooltip.data(evt).hidden(false);\n    });\n\n    lines.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true)\n    });\n\n    //============================================================\n    // Functions\n    //------------------------------------------------------------\n\n    var indexifyYGetter = null;\n    /* Normalize the data according to an index point. 
*/\n    function indexify(idx, data) {\n        if (!indexifyYGetter) indexifyYGetter = lines.y();\n        return data.map(function(line, i) {\n            if (!line.values) {\n                return line;\n            }\n            var indexValue = line.values[idx];\n            if (indexValue == null) {\n                return line;\n            }\n            var v = indexifyYGetter(indexValue, idx);\n\n            //TODO: implement check below, and disable series if series loses 100% or more cause divide by 0 issue\n            if (v < -.95 && !noErrorCheck) {\n                //if a series loses more than 100%, calculations fail.. anything close can cause major distortion (but is mathematically correct till it hits 100)\n\n                line.tempDisabled = true;\n                return line;\n            }\n\n            line.tempDisabled = false;\n\n            line.values = line.values.map(function(point, pointIndex) {\n                point.display = {'y': (indexifyYGetter(point, pointIndex) - v) / (1 + v) };\n                return point;\n            });\n\n            return line;\n        })\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.lines = lines;\n    chart.legend = legend;\n    chart.controls = controls;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.interactiveLayer = interactiveLayer;\n    chart.state = state;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        rescaleY:     {get: function(){return rescaleY;}, set: function(_){rescaleY=_;}},\n        showControls:     {get: function(){return showControls;}, set: function(_){showControls=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        average: {get: function(){return average;}, set: function(_){average=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        showXAxis:    {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        noErrorCheck:    {get: function(){return noErrorCheck;}, set: function(_){noErrorCheck=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n        }},\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = _;\n            if (_ === true) {\n                chart.interactive(false);\n                chart.useVoronoi(false);\n            }\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( (_) ? 'right' : 'left');\n        }},\n        duration:    {get: function(){return duration;}, set: function(_){\n            duration = _;\n            lines.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n            renderWatch.reset(duration);\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, lines);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","//TODO: consider deprecating by adding necessary features to multiBar model\nnv.models.discreteBar = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 960\n        , height = 500\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container\n        , x = d3.scale.ordinal()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , forceY = [0] // 0 is forced by default.. this makes sense for the majority of bar graphs... user can always do chart.forceY([]) to remove\n        , color = nv.utils.defaultColor()\n        , showValues = false\n        , valueFormat = d3.format(',.2f')\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd')\n        , rectClass = 'discreteBar'\n        , duration = 250\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0;\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            //add series index to each data point for reference\n            data.forEach(function(series, i) {\n                series.values.forEach(function(point) {\n                    point.series = i;\n                });\n            });\n\n            // Setup Scales\n            // remap and flatten the data for use in calculating the scales' domains\n            var seriesData = (xDomain && yDomain) ? 
[] : // if we know xDomain and yDomain, no need to calculate\n                data.map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d,i), y: getY(d,i), y0: d.y0 }\n                    })\n                });\n\n            x   .domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))\n                .rangeBands(xRange || [0, availableWidth], .1);\n            y   .domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return d.y }).concat(forceY)));\n\n            // If showValues, pad the Y axis range to account for label height\n            if (showValues) y.range(yRange || [availableHeight - (y.domain()[0] < 0 ? 12 : 0), y.domain()[1] > 0 ? 12 : 0]);\n            else y.range(yRange || [availableHeight, 0]);\n\n            //store old scales if they exist\n            x0 = x0 || x;\n            y0 = y0 || y.copy().range([y(0),y(0)]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-discretebar').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-discretebar');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-groups');\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            //TODO: by definition, the discrete bar should not have multiple groups, will modify/remove later\n            var groups = wrap.select('.nv-groups').selectAll('.nv-group')\n                .data(function(d) { return d }, function(d) { return d.key });\n            groups.enter().append('g')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6);\n            groups.exit()\n                .watchTransition(renderWatch, 'discreteBar: exit groups')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6)\n                .remove();\n            groups\n                .attr('class', function(d,i) { return 'nv-group nv-series-' + i })\n                .classed('hover', function(d) { return d.hover });\n            groups\n                .watchTransition(renderWatch, 'discreteBar: groups')\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', .75);\n\n            var bars = groups.selectAll('g.nv-bar')\n                .data(function(d) { return d.values });\n            bars.exit().remove();\n\n            var barsEnter = bars.enter().append('g')\n                .attr('transform', function(d,i,j) {\n                    return 'translate(' + (x(getX(d,i)) + x.rangeBand() * .05 ) + ', ' + y(0) + ')'\n                })\n                .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mousemove', 
function(d,i) {\n                    dispatch.elementMousemove({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('click', function(d,i) {\n                    var element = this;\n                    dispatch.elementClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\"),\n                        event: d3.event,\n                        element: element\n                    });\n                    d3.event.stopPropagation();\n                })\n                .on('dblclick', function(d,i) {\n                    dispatch.elementDblClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                    d3.event.stopPropagation();\n                });\n\n            barsEnter.append('rect')\n                .attr('height', 0)\n                .attr('width', x.rangeBand() * .9 / data.length )\n\n            if (showValues) {\n                barsEnter.append('text')\n                    .attr('text-anchor', 'middle')\n                ;\n\n                bars.select('text')\n                    .text(function(d,i) { return valueFormat(getY(d,i)) })\n                    .watchTransition(renderWatch, 'discreteBar: bars text')\n                    .attr('x', x.rangeBand() * .9 / 2)\n                    .attr('y', function(d,i) { return getY(d,i) < 0 ? y(getY(d,i)) - y(0) + 12 : -4 })\n\n                ;\n            } else {\n                bars.selectAll('text').remove();\n            }\n\n            bars\n                .attr('class', function(d,i) { return getY(d,i) < 0 ? 
'nv-bar negative' : 'nv-bar positive' })\n                .style('fill', function(d,i) { return d.color || color(d,i) })\n                .style('stroke', function(d,i) { return d.color || color(d,i) })\n                .select('rect')\n                .attr('class', rectClass)\n                .watchTransition(renderWatch, 'discreteBar: bars rect')\n                .attr('width', x.rangeBand() * .9 / data.length);\n            bars.watchTransition(renderWatch, 'discreteBar: bars')\n                //.delay(function(d,i) { return i * 1200 / data[0].values.length })\n                .attr('transform', function(d,i) {\n                    var left = x(getX(d,i)) + x.rangeBand() * .05,\n                        top = getY(d,i) < 0 ?\n                            y(0) :\n                                y(0) - y(getY(d,i)) < 1 ?\n                            y(0) - 1 : //make 1 px positive bars show up above y=0\n                            y(getY(d,i));\n\n                    return 'translate(' + left + ', ' + top + ')'\n                })\n                .select('rect')\n                .attr('height', function(d,i) {\n                    return  Math.max(Math.abs(y(getY(d,i)) - y(0)), 1)\n                });\n\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n\n        });\n\n        renderWatch.renderEnd('discreteBar immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:   {get: function(){return width;}, set: function(_){width=_;}},\n        height:  {get: function(){return height;}, set: function(_){height=_;}},\n        forceY:  {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        showValues: {get: function(){return showValues;}, set: function(_){showValues=_;}},\n        x:       {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:       {get: function(){return getY;}, set: function(_){getY=_;}},\n        xScale:  {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:  {get: function(){return y;}, set: function(_){y=_;}},\n        xDomain: {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain: {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:  {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:  {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        valueFormat:    {get: function(){return valueFormat;}, set: function(_){valueFormat=_;}},\n        id:          {get: function(){return id;}, set: function(_){id=_;}},\n        rectClass: {get: function(){return rectClass;}, set: function(_){rectClass=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.discreteBarChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var discretebar = nv.models.discreteBar()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n\t, legend = nv.models.legend()\n        , tooltip = nv.models.tooltip()\n        ;\n\n    var margin = {top: 15, right: 10, bottom: 50, left: 60}\n        , marginTop = null\n        , width = null\n        , height = null\n        , color = nv.utils.getColor()\n\t, showLegend = false\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , staggerLabels = false\n        , wrapLabels = false\n        , rotateLabels = 0\n        , x\n        , y\n        , noData = null\n        , dispatch = d3.dispatch('beforeUpdate','renderEnd')\n        , duration = 250\n        ;\n\n    xAxis\n        .orient('bottom')\n        .showMaxMin(false)\n        .tickFormat(function(d) { return d })\n    ;\n    yAxis\n        .orient((rightAlignYAxis) ? 'right' : 'left')\n        .tickFormat(d3.format(',.1f'))\n    ;\n\n    tooltip\n        .duration(0)\n        .headerEnabled(false)\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        })\n        .keyFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(discretebar);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() {\n                dispatch.beforeUpdate();\n                container.transition().duration(duration).call(chart);\n            };\n            chart.container = this;\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container);\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = discretebar.xScale();\n            y = discretebar.yScale().clamp(true);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-discreteBarWithAxes').data([data]);\n            var gEnter = 
wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-discreteBarWithAxes').append('g');\n            var defsEnter = gEnter.append('defs');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis')\n                .append('g').attr('class', 'nv-zeroLine')\n                .append('line');\n\n            gEnter.append('g').attr('class', 'nv-barsWrap');\n\t    gEnter.append('g').attr('class', 'nv-legendWrap');\n\n            g.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth);\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                wrap.select('.nv-legendWrap')\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n            }\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            // Main Chart Component(s)\n            discretebar\n                .width(availableWidth)\n                .height(availableHeight);\n\n            var barsWrap = g.select('.nv-barsWrap')\n                .datum(data.filter(function(d) { return !d.disabled }));\n\n            barsWrap.transition().call(discretebar);\n\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-x-label-clip-' + discretebar.id())\n                .append('rect');\n\n            g.select('#nv-x-label-clip-' + discretebar.id() + ' rect')\n                .attr('width', x.rangeBand() * (staggerLabels ? 2 : 1))\n                .attr('height', 16)\n                .attr('x', -x.rangeBand() / (staggerLabels ? 1 : 2 ));\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n\n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + (y.range()[0] + ((discretebar.showValues() && y.domain()[0] < 0) ? 16 : 0)) + ')');\n                g.select('.nv-x.nv-axis').call(xAxis);\n\n                var xTicks = g.select('.nv-x.nv-axis').selectAll('g');\n                if (staggerLabels) {\n                    xTicks\n                        .selectAll('text')\n                        .attr('transform', function(d,i,j) { return 'translate(0,' + (j % 2 == 0 ? '5' : '17') + ')' })\n                }\n\n                if (rotateLabels) {\n                    xTicks\n                        .selectAll('.tick text')\n                        .attr('transform', 'rotate(' + rotateLabels + ' 0,0)')\n                        .style('text-anchor', rotateLabels > 0 ? 
'start' : 'end');\n                }\n\n                if (wrapLabels) {\n                    g.selectAll('.tick text')\n                        .call(nv.utils.wrapTicks, chart.xAxis.rangeBand())\n                }\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis').call(yAxis);\n            }\n\n            // Zero line\n            g.select(\".nv-zeroLine line\")\n                .attr(\"x1\",0)\n                .attr(\"x2\",(rightAlignYAxis) ? -availableWidth : availableWidth)\n                .attr(\"y1\", y(0))\n                .attr(\"y2\", y(0))\n            ;\n        });\n\n        renderWatch.renderEnd('discreteBar chart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    discretebar.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt['series'] = {\n            key: chart.x()(evt.data),\n            value: chart.y()(evt.data),\n            color: evt.color\n        };\n        tooltip.data(evt).hidden(false);\n    });\n\n    discretebar.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    discretebar.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.discretebar = discretebar;\n    chart.legend = legend;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n\tshowLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        staggerLabels: {get: function(){return staggerLabels;}, set: function(_){staggerLabels=_;}},\n        rotateLabels:  {get: function(){return rotateLabels;}, set: function(_){rotateLabels=_;}},\n        wrapLabels:  {get: function(){return wrapLabels;}, set: function(_){wrapLabels=!!_;}},\n        showXAxis: {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis: {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            discretebar.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            discretebar.color(color);\n\t    legend.color(color);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( (_) ? 'right' : 'left');\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, discretebar);\n    nv.utils.initOptions(chart);\n\n    return chart;\n}\n","\nnv.models.distribution = function() {\n    \"use strict\";\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 400 //technically width or height depending on x or y....\n        , size = 8\n        , axis = 'x' // 'x' or 'y'... horizontal or vertical\n        , getData = function(d) { return d[axis] }  // defaults d.x or d.y\n        , color = nv.utils.defaultColor()\n        , scale = d3.scale.linear()\n        , domain\n        , duration = 250\n        , dispatch = d3.dispatch('renderEnd')\n        ;\n\n    //============================================================\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var scale0;\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    //============================================================\n\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableLength = width - (axis === 'x' ? margin.left + margin.right : margin.top + margin.bottom),\n                naxis = axis == 'x' ? 
'y' : 'x',\n                container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            //------------------------------------------------------------\n            // Setup Scales\n\n            scale0 = scale0 || scale;\n\n            //------------------------------------------------------------\n\n\n            //------------------------------------------------------------\n            // Setup containers and skeleton of chart\n\n            var wrap = container.selectAll('g.nv-distribution').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-distribution');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')\n\n            //------------------------------------------------------------\n\n\n            var distWrap = g.selectAll('g.nv-dist')\n                .data(function(d) { return d }, function(d) { return d.key });\n\n            distWrap.enter().append('g');\n            distWrap\n                .attr('class', function(d,i) { return 'nv-dist nv-series-' + i })\n                .style('stroke', function(d,i) { return color(d, i) });\n\n            var dist = distWrap.selectAll('line.nv-dist' + axis)\n                .data(function(d) { return d.values })\n            dist.enter().append('line')\n                .attr(axis + '1', function(d,i) { return scale0(getData(d,i)) })\n                .attr(axis + '2', function(d,i) { return scale0(getData(d,i)) })\n            renderWatch.transition(distWrap.exit().selectAll('line.nv-dist' + axis), 'dist exit')\n                // .transition()\n                .attr(axis + '1', function(d,i) { return scale(getData(d,i)) })\n                .attr(axis + '2', function(d,i) { return scale(getData(d,i)) })\n                .style('stroke-opacity', 0)\n                .remove();\n            dist\n                .attr('class', function(d,i) { return 'nv-dist' + axis + ' nv-dist' + axis + '-' + i })\n                .attr(naxis + '1', 0)\n                .attr(naxis + '2', size);\n            renderWatch.transition(dist, 'dist')\n                // .transition()\n                .attr(axis + '1', function(d,i) { return scale(getData(d,i)) })\n                .attr(axis + '2', function(d,i) { return scale(getData(d,i)) })\n\n\n            scale0 = scale.copy();\n\n        });\n        renderWatch.renderEnd('distribution immediate');\n        return chart;\n    }\n\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n    chart.options = nv.utils.optionsFunc.bind(chart);\n    chart.dispatch = dispatch;\n\n    chart.margin = function(_) {\n        if (!arguments.length) return margin;\n        margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;\n        margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;\n        margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;\n        margin.left   = typeof _.left   != 'undefined' ? 
_.left   : margin.left;\n        return chart;\n    };\n\n    chart.width = function(_) {\n        if (!arguments.length) return width;\n        width = _;\n        return chart;\n    };\n\n    chart.axis = function(_) {\n        if (!arguments.length) return axis;\n        axis = _;\n        return chart;\n    };\n\n    chart.size = function(_) {\n        if (!arguments.length) return size;\n        size = _;\n        return chart;\n    };\n\n    chart.getData = function(_) {\n        if (!arguments.length) return getData;\n        getData = d3.functor(_);\n        return chart;\n    };\n\n    chart.scale = function(_) {\n        if (!arguments.length) return scale;\n        scale = _;\n        return chart;\n    };\n\n    chart.color = function(_) {\n        if (!arguments.length) return color;\n        color = nv.utils.getColor(_);\n        return chart;\n    };\n\n    chart.duration = function(_) {\n        if (!arguments.length) return duration;\n        duration = _;\n        renderWatch.reset(duration);\n        return chart;\n    };\n    //============================================================\n\n\n    return chart;\n}\n","nv.models.focus = function(content) {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var content = content || nv.models.line()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , brush = d3.svg.brush()\n        ;\n\n    var margin = {top: 10, right: 0, bottom: 30, left: 0}\n        , color = nv.utils.defaultColor()\n        , width = null\n        , height = 70\n        , showXAxis = true\n        , showYAxis = false\n        , rightAlignYAxis = false\n        , ticks = null\n        , x\n        , y\n        , brushExtent = null\n        , duration = 250\n        , dispatch = d3.dispatch('brush', 'onBrush', 'renderEnd')\n        , syncBrushing = true\n        ;\n\n    content.interactive(false);\n    content.pointActive(function(d) { return false; });\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(content);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = height - margin.top - margin.bottom;\n\n            chart.update = function() { \n                if( duration === 0 ) {\n                    container.call( chart );\n                } else {\n                    container.transition().duration(duration).call(chart);\n                }\n            };\n            chart.container = this;\n\n            // Setup Scales\n            x = content.xScale();\n            y = content.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-focus').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-focus').append('g');\n            var g = wrap.select('g');\n\n            wrap.attr('transform', 
'translate(' + margin.left + ',' + margin.top + ')');\n\n            gEnter.append('g').attr('class', 'nv-background').append('rect');\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis');\n            gEnter.append('g').attr('class', 'nv-contentWrap');\n            gEnter.append('g').attr('class', 'nv-brushBackground');\n            gEnter.append('g').attr('class', 'nv-x nv-brush');\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            g.select('.nv-background rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n                \n            content\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled; }));\n\n            var contentWrap = g.select('.nv-contentWrap')\n                .datum(data.filter(function(d) { return !d.disabled; }));\n\n            d3.transition(contentWrap).call(content);\n            \n            // Setup Brush\n            brush\n                .x(x)\n                .on('brush', function() {\n                    onBrush(syncBrushing);\n                });\n\n            brush.on('brushend', function () {\n                if (!syncBrushing) {\n                    dispatch.onBrush(brush.empty() ? x.domain() : brush.extent());\n                }\n            });\n\n            if (brushExtent) brush.extent(brushExtent);\n\n            var brushBG = g.select('.nv-brushBackground').selectAll('g')\n                .data([brushExtent || brush.extent()]);\n    \n            var brushBGenter = brushBG.enter()\n                .append('g');\n\n            brushBGenter.append('rect')\n                .attr('class', 'left')\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('height', availableHeight);\n\n            brushBGenter.append('rect')\n                .attr('class', 'right')\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('height', availableHeight);\n\n            var gBrush = g.select('.nv-x.nv-brush')\n                .call(brush);\n            gBrush.selectAll('rect')\n                .attr('height', availableHeight);\n            gBrush.selectAll('.resize').append('path').attr('d', resizePath);\n\n            onBrush(true);\n\n            g.select('.nv-background rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            if (showXAxis) {\n                xAxis.scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n  \n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')');\n                d3.transition(g.select('.nv-x.nv-axis'))\n                    .call(xAxis);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                d3.transition(g.select('.nv-y.nv-axis'))\n                    .call(yAxis);\n            }\n            \n       
     g.select('.nv-x.nv-axis')\n                .attr('transform', 'translate(0,' + y.range()[0] + ')');\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            //============================================================\n            // Functions\n            //------------------------------------------------------------\n    \n            // Taken from crossfilter (http://square.github.com/crossfilter/)\n            function resizePath(d) {\n                var e = +(d == 'e'),\n                    x = e ? 1 : -1,\n                    y = availableHeight / 3;\n                return 'M' + (0.5 * x) + ',' + y\n                    + 'A6,6 0 0 ' + e + ' ' + (6.5 * x) + ',' + (y + 6)\n                    + 'V' + (2 * y - 6)\n                    + 'A6,6 0 0 ' + e + ' ' + (0.5 * x) + ',' + (2 * y)\n                    + 'Z'\n                    + 'M' + (2.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8)\n                    + 'M' + (4.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8);\n            }\n    \n    \n            function updateBrushBG() {\n                if (!brush.empty()) brush.extent(brushExtent);\n                brushBG\n                    .data([brush.empty() ? x.domain() : brushExtent])\n                    .each(function(d,i) {\n                        var leftWidth = x(d[0]) - x.range()[0],\n                            rightWidth = availableWidth - x(d[1]);\n                        d3.select(this).select('.left')\n                            .attr('width',  leftWidth < 0 ? 0 : leftWidth);\n    \n                        d3.select(this).select('.right')\n                            .attr('x', x(d[1]))\n                            .attr('width', rightWidth < 0 ? 0 : rightWidth);\n                    });\n            }\n\n\n            function onBrush(shouldDispatch) {\n                brushExtent = brush.empty() ? null : brush.extent();\n                var extent = brush.empty() ? 
x.domain() : brush.extent();\n                dispatch.brush({extent: extent, brush: brush});\n                updateBrushBG();\n                if (shouldDispatch) {\n                    dispatch.onBrush(extent);\n                }\n            }\n        });\n\n        renderWatch.renderEnd('focus immediate');\n        return chart;\n    }\n\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.content = content;\n    chart.brush = brush;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showXAxis:      {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        brushExtent: {get: function(){return brushExtent;}, set: function(_){brushExtent=_;}},\n        syncBrushing: {get: function(){return syncBrushing;}, set: function(_){syncBrushing=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            content.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            content.color(color);\n        }},\n        interpolate: {get: function(){return content.interpolate();}, set: function(_){\n            content.interpolate(_);\n        }},\n        xTickFormat: {get: function(){return xAxis.tickFormat();}, set: function(_){\n            xAxis.tickFormat(_);\n        }},\n        yTickFormat: {get: function(){return yAxis.tickFormat();}, set: function(_){\n            yAxis.tickFormat(_);\n        }},\n        x: {get: function(){return content.x();}, set: function(_){\n            content.x(_);\n        }},\n        y: {get: function(){return content.y();}, set: function(_){\n            content.y(_);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( rightAlignYAxis ? 
'right' : 'left');\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, content);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","nv.models.forceDirectedGraph = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n    var margin = {top: 2, right: 0, bottom: 2, left: 0}\n        , width = 400\n        , height = 32\n        , container = null\n        , dispatch = d3.dispatch('renderEnd')\n        , color = nv.utils.getColor(['#000'])\n        , tooltip      = nv.models.tooltip()\n        , noData = null\n        // Force directed graph specific parameters [default values]\n        , linkStrength = 0.1\n        , friction = 0.9\n        , linkDist = 30\n        , charge = -120\n        , gravity = 0.1\n        , theta = 0.8\n        , alpha = 0.1\n        , radius = 5\n        // These functions allow to add extra attributes to ndes and links\n        ,nodeExtras = function(nodes) { /* Do nothing */ }\n        ,linkExtras = function(links) { /* Do nothing */ }\n        ;\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    function chart(selection) {\n        renderWatch.reset();\n\n        selection.each(function(data) {\n          container = d3.select(this);\n          nv.utils.initSVG(container);\n\n          var availableWidth = nv.utils.availableWidth(width, container, margin),\n              availableHeight = nv.utils.availableHeight(height, container, margin);\n\n          container\n                  .attr(\"width\", availableWidth)\n                  .attr(\"height\", availableHeight);\n\n          // Display No Data message if there's nothing to show.\n          if (!data || !data.links || !data.nodes) {\n              nv.utils.noData(chart, container)\n              return chart;\n          } else {\n              container.selectAll('.nv-noData').remove();\n          }\n          container.selectAll('*').remove();\n\n          // Collect names of all fields in the nodes\n          var nodeFieldSet = new Set();\n          data.nodes.forEach(function(node) {\n            var keys = Object.keys(node);\n            keys.forEach(function(key) {\n              nodeFieldSet.add(key);\n            });\n          });\n\n          var force = d3.layout.force()\n                .nodes(data.nodes)\n                .links(data.links)\n                .size([availableWidth, availableHeight])\n                .linkStrength(linkStrength)\n                .friction(friction)\n                .linkDistance(linkDist)\n                .charge(charge)\n                .gravity(gravity)\n                .theta(theta)\n                .alpha(alpha)\n                .start();\n\n          var link = container.selectAll(\".link\")\n                .data(data.links)\n                .enter().append(\"line\")\n                .attr(\"class\", \"nv-force-link\")\n                .style(\"stroke-width\", function(d) { return Math.sqrt(d.value); });\n\n          var node = container.selectAll(\".node\")\n                .data(data.nodes)\n                .enter()\n                .append(\"g\")\n                .attr(\"class\", \"nv-force-node\")\n                .call(force.drag);\n\n          node\n            .append(\"circle\")\n            
.attr(\"r\", radius)\n            .style(\"fill\", function(d) { return color(d) } )\n            .on(\"mouseover\", function(evt) {\n              container.select('.nv-series-' + evt.seriesIndex + ' .nv-distx-' + evt.pointIndex)\n                  .attr('y1', evt.py);\n              container.select('.nv-series-' + evt.seriesIndex + ' .nv-disty-' + evt.pointIndex)\n                  .attr('x2', evt.px);\n\n              // Add 'series' object to\n              var nodeColor = color(evt);\n              evt.series = [];\n              nodeFieldSet.forEach(function(field) {\n                evt.series.push({\n                  color: nodeColor,\n                  key:   field,\n                  value: evt[field]\n                });\n              });\n              tooltip.data(evt).hidden(false);\n            })\n            .on(\"mouseout\",  function(d) {\n              tooltip.hidden(true);\n            });\n\n          tooltip.headerFormatter(function(d) {return \"Node\";});\n\n          // Apply extra attributes to nodes and links (if any)\n          linkExtras(link);\n          nodeExtras(node);\n\n          force.on(\"tick\", function() {\n              link.attr(\"x1\", function(d) { return d.source.x; })\n                  .attr(\"y1\", function(d) { return d.source.y; })\n                  .attr(\"x2\", function(d) { return d.target.x; })\n                  .attr(\"y2\", function(d) { return d.target.y; });\n\n              node.attr(\"transform\", function(d) {\n                return \"translate(\" + d.x + \", \" + d.y + \")\";\n              });\n            });\n        });\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:     {get: function(){return width;}, set: function(_){width=_;}},\n        height:    {get: function(){return height;}, set: function(_){height=_;}},\n\n        // Force directed graph specific parameters\n        linkStrength:{get: function(){return linkStrength;}, set: function(_){linkStrength=_;}},\n        friction:    {get: function(){return friction;}, set: function(_){friction=_;}},\n        linkDist:    {get: function(){return linkDist;}, set: function(_){linkDist=_;}},\n        charge:      {get: function(){return charge;}, set: function(_){charge=_;}},\n        gravity:     {get: function(){return gravity;}, set: function(_){gravity=_;}},\n        theta:       {get: function(){return theta;}, set: function(_){theta=_;}},\n        alpha:       {get: function(){return alpha;}, set: function(_){alpha=_;}},\n        radius:      {get: function(){return radius;}, set: function(_){radius=_;}},\n\n        //functor options\n        x: {get: function(){return getX;}, set: function(_){getX=d3.functor(_);}},\n        y: {get: function(){return getY;}, set: function(_){getY=d3.functor(_);}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        nodeExtras: {get: function(){return nodeExtras;}, set: function(_){\n            nodeExtras = _;\n        }},\n        linkExtras: {get: function(){return linkExtras;}, set: function(_){\n            linkExtras = _;\n        }}\n    });\n\n    chart.dispatch = dispatch;\n    chart.tooltip = tooltip;\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","nv.models.furiousLegend = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 5, right: 0, bottom: 5, left: 0}\n        , width = 400\n        , height = 20\n        , getKey = function(d) { return d.key }\n        , keyFormatter = function (d) { return d }\n        , color = nv.utils.getColor()\n        , maxKeyLength = 20 //default value for key lengths\n        , align = true\n        , padding = 28 //define how much space between legend items. - recommend 32 for furious version\n        , rightAlign = true\n        , updateState = true   //If true, legend will update data.disabled and trigger a 'stateChange' dispatch.\n        , radioButtonMode = false   //If true, clicking legend items will cause it to behave like a radio button. (only one can be selected at a time)\n        , expanded = false\n        , dispatch = d3.dispatch('legendClick', 'legendDblclick', 'legendMouseover', 'legendMouseout', 'stateChange')\n        , vers = 'classic' //Options are \"classic\" and \"furious\"\n        ;\n\n    function chart(selection) {\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-legend').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-legend').append('g');\n            var g = wrap.select('g');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var series = g.selectAll('.nv-series')\n                .data(function(d) {\n                    if(vers != 'furious') return d;\n\n                    return d.filter(function(n) {\n                        return expanded ? 
true : !n.disengaged;\n                    });\n                });\n            var seriesEnter = series.enter().append('g').attr('class', 'nv-series')\n\n            var seriesShape;\n\n            if(vers == 'classic') {\n                seriesEnter.append('circle')\n                    .style('stroke-width', 2)\n                    .attr('class','nv-legend-symbol')\n                    .attr('r', 5);\n\n                seriesShape = series.select('circle');\n            } else if (vers == 'furious') {\n                seriesEnter.append('rect')\n                    .style('stroke-width', 2)\n                    .attr('class','nv-legend-symbol')\n                    .attr('rx', 3)\n                    .attr('ry', 3);\n\n                seriesShape = series.select('rect');\n\n                seriesEnter.append('g')\n                    .attr('class', 'nv-check-box')\n                    .property('innerHTML','<path d=\"M0.5,5 L22.5,5 L22.5,26.5 L0.5,26.5 L0.5,5 Z\" class=\"nv-box\"></path><path d=\"M5.5,12.8618467 L11.9185089,19.2803556 L31,0.198864511\" class=\"nv-check\"></path>')\n                    .attr('transform', 'translate(-10,-8)scale(0.5)');\n\n                var seriesCheckbox = series.select('.nv-check-box');\n\n                seriesCheckbox.each(function(d,i) {\n                    d3.select(this).selectAll('path')\n                        .attr('stroke', setTextColor(d,i));\n                });\n            }\n\n            seriesEnter.append('text')\n                .attr('text-anchor', 'start')\n                .attr('class','nv-legend-text')\n                .attr('dy', '.32em')\n                .attr('dx', '8');\n\n            var seriesText = series.select('text.nv-legend-text');\n\n            series\n                .on('mouseover', function(d,i) {\n                    dispatch.legendMouseover(d,i);  //TODO: Make consistent with other event objects\n                })\n                .on('mouseout', function(d,i) {\n                    dispatch.legendMouseout(d,i);\n                })\n                .on('click', function(d,i) {\n                    dispatch.legendClick(d,i);\n                    // make sure we re-get data in case it was modified\n                    var data = series.data();\n                    if (updateState) {\n                        if(vers =='classic') {\n                            if (radioButtonMode) {\n                                //Radio button mode: set every series to disabled,\n                                //  and enable the clicked series.\n                                data.forEach(function(series) { series.disabled = true});\n                                d.disabled = false;\n                            }\n                            else {\n                                d.disabled = !d.disabled;\n                                if (data.every(function(series) { return series.disabled})) {\n                                    //the default behavior of NVD3 legends is, if every single series\n                                    // is disabled, turn all series' back on.\n                                    data.forEach(function(series) { series.disabled = false});\n                                }\n                            }\n                        } else if(vers == 'furious') {\n                            if(expanded) {\n                                d.disengaged = !d.disengaged;\n                                d.userDisabled = d.userDisabled == undefined ? 
!!d.disabled : d.userDisabled;\n                                d.disabled = d.disengaged || d.userDisabled;\n                            } else if (!expanded) {\n                                d.disabled = !d.disabled;\n                                d.userDisabled = d.disabled;\n                                var engaged = data.filter(function(d) { return !d.disengaged; });\n                                if (engaged.every(function(series) { return series.userDisabled })) {\n                                    //the default behavior of NVD3 legends is, if every single series\n                                    // is disabled, turn all series' back on.\n                                    data.forEach(function(series) {\n                                        series.disabled = series.userDisabled = false;\n                                    });\n                                }\n                            }\n                        }\n                        dispatch.stateChange({\n                            disabled: data.map(function(d) { return !!d.disabled }),\n                            disengaged: data.map(function(d) { return !!d.disengaged })\n                        });\n\n                    }\n                })\n                .on('dblclick', function(d,i) {\n                    if(vers == 'furious' && expanded) return;\n                    dispatch.legendDblclick(d,i);\n                    if (updateState) {\n                        // make sure we re-get data in case it was modified\n                        var data = series.data();\n                        //the default behavior of NVD3 legends, when double clicking one,\n                        // is to set all other series' to false, and make the double clicked series enabled.\n                        data.forEach(function(series) {\n                            series.disabled = true;\n                            if(vers == 'furious') series.userDisabled = series.disabled;\n                        });\n                        d.disabled = false;\n                        if(vers == 'furious') d.userDisabled = d.disabled;\n                        dispatch.stateChange({\n                            disabled: data.map(function(d) { return !!d.disabled })\n                        });\n                    }\n                });\n\n            series.classed('nv-disabled', function(d) { return d.userDisabled });\n            series.exit().remove();\n\n            seriesText\n                .attr('fill', setTextColor)\n                .text(function (d) { return keyFormatter(getKey(d)) });\n\n            //TODO: implement fixed-width and max-width options (max-width is especially useful with the align option)\n            // NEW ALIGNING CODE, TODO: clean up\n\n            var versPadding;\n            switch(vers) {\n                case 'furious' :\n                    versPadding = 23;\n                    break;\n                case 'classic' :\n                    versPadding = 20;\n            }\n\n            if (align) {\n\n                var seriesWidths = [];\n                series.each(function(d,i) {\n                    var legendText;\n                    if (keyFormatter(getKey(d)) && keyFormatter(getKey(d)).length > maxKeyLength) {\n                        var trimmedKey = keyFormatter(getKey(d)).substring(0, maxKeyLength);\n                        legendText = d3.select(this).select('text').text(trimmedKey + \"...\");\n                        
d3.select(this).append(\"svg:title\").text(keyFormatter(getKey(d)));\n                    } else {\n                        legendText = d3.select(this).select('text');\n                    }\n                    var nodeTextLength;\n                    try {\n                        nodeTextLength = legendText.node().getComputedTextLength();\n                        // If the legendText is display:none'd (nodeTextLength == 0), simulate an error so we approximate, instead\n                        if(nodeTextLength <= 0) throw Error();\n                    }\n                    catch(e) {\n                        nodeTextLength = nv.utils.calcApproxTextWidth(legendText);\n                    }\n\n                    seriesWidths.push(nodeTextLength + padding);\n                });\n\n                var seriesPerRow = 0;\n                var legendWidth = 0;\n                var columnWidths = [];\n\n                while ( legendWidth < availableWidth && seriesPerRow < seriesWidths.length) {\n                    columnWidths[seriesPerRow] = seriesWidths[seriesPerRow];\n                    legendWidth += seriesWidths[seriesPerRow++];\n                }\n                if (seriesPerRow === 0) seriesPerRow = 1; //minimum of one series per row\n\n                while ( legendWidth > availableWidth && seriesPerRow > 1 ) {\n                    columnWidths = [];\n                    seriesPerRow--;\n\n                    for (var k = 0; k < seriesWidths.length; k++) {\n                        if (seriesWidths[k] > (columnWidths[k % seriesPerRow] || 0) )\n                            columnWidths[k % seriesPerRow] = seriesWidths[k];\n                    }\n\n                    legendWidth = columnWidths.reduce(function(prev, cur, index, array) {\n                        return prev + cur;\n                    });\n                }\n\n                var xPositions = [];\n                for (var i = 0, curX = 0; i < seriesPerRow; i++) {\n                    xPositions[i] = curX;\n                    curX += columnWidths[i];\n                }\n\n                series\n                    .attr('transform', function(d, i) {\n                        return 'translate(' + xPositions[i % seriesPerRow] + ',' + (5 + Math.floor(i / seriesPerRow) * versPadding) + ')';\n                    });\n\n                //position legend as far right as possible within the total width\n                if (rightAlign) {\n                    g.attr('transform', 'translate(' + (width - margin.right - legendWidth) + ',' + margin.top + ')');\n                }\n                else {\n                    g.attr('transform', 'translate(0' + ',' + margin.top + ')');\n                }\n\n                height = margin.top + margin.bottom + (Math.ceil(seriesWidths.length / seriesPerRow) * versPadding);\n\n            } else {\n\n                var ypos = 5,\n                    newxpos = 5,\n                    maxwidth = 0,\n                    xpos;\n                series\n                    .attr('transform', function(d, i) {\n                        var length = d3.select(this).select('text').node().getComputedTextLength() + padding;\n                        xpos = newxpos;\n\n                        if (width < margin.left + margin.right + xpos + length) {\n                            newxpos = xpos = 5;\n                            ypos += versPadding;\n                        }\n\n                        newxpos += length;\n                        if (newxpos > maxwidth) maxwidth = newxpos;\n\n            
            return 'translate(' + xpos + ',' + ypos + ')';\n                    });\n\n                //position legend as far right as possible within the total width\n                g.attr('transform', 'translate(' + (width - margin.right - maxwidth) + ',' + margin.top + ')');\n\n                height = margin.top + margin.bottom + ypos + 15;\n            }\n\n            if(vers == 'furious') {\n                // Size rectangles after text is placed\n                seriesShape\n                    .attr('width', function(d,i) {\n                        return seriesText[0][i].getComputedTextLength() + 27;\n                    })\n                    .attr('height', 18)\n                    .attr('y', -9)\n                    .attr('x', -15)\n            }\n\n            seriesShape\n                .style('fill', setBGColor)\n                .style('stroke', function(d,i) { return d.color || color(d, i) });\n        });\n\n        function setTextColor(d,i) {\n            if(vers != 'furious') return '#000';\n            if(expanded) {\n                return d.disengaged ? color(d,i) : '#fff';\n            } else if (!expanded) {\n                return !!d.disabled ? color(d,i) : '#fff';\n            }\n        }\n\n        function setBGColor(d,i) {\n            if(expanded && vers == 'furious') {\n                return d.disengaged ? '#fff' : color(d,i);\n            } else {\n                return !!d.disabled ? '#fff' : color(d,i);\n            }\n        }\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:          {get: function(){return width;}, set: function(_){width=_;}},\n        height:         {get: function(){return height;}, set: function(_){height=_;}},\n        key:            {get: function(){return getKey;}, set: function(_){getKey=_;}},\n        keyFormatter:   {get: function(){return keyFormatter;}, set: function(_){keyFormatter=_;}},\n        align:          {get: function(){return align;}, set: function(_){align=_;}},\n        rightAlign:     {get: function(){return rightAlign;}, set: function(_){rightAlign=_;}},\n        maxKeyLength:   {get: function(){return maxKeyLength;}, set: function(_){maxKeyLength=_;}},\n        padding:        {get: function(){return padding;}, set: function(_){padding=_;}},\n        updateState:    {get: function(){return updateState;}, set: function(_){updateState=_;}},\n        radioButtonMode:{get: function(){return radioButtonMode;}, set: function(_){radioButtonMode=_;}},\n        expanded:       {get: function(){return expanded;}, set: function(_){expanded=_;}},\n        vers:           {get: function(){return vers;}, set: function(_){vers=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","//TODO: consider deprecating and using multibar with single series for this\nnv.models.historicalBar = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = null\n        , height = null\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , x = d3.scale.linear()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , forceX = []\n        , forceY = [0]\n        , padData = false\n        , clipEdge = true\n        , color = nv.utils.defaultColor()\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd')\n        , interactive = true\n        ;\n\n    var renderWatch = nv.utils.renderWatch(dispatch, 0);\n\n    function chart(selection) {\n        selection.each(function(data) {\n            renderWatch.reset();\n\n            container = d3.select(this);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            nv.utils.initSVG(container);\n\n            // Setup Scales\n            x.domain(xDomain || d3.extent(data[0].values.map(getX).concat(forceX) ));\n\n            if (padData)\n                x.range(xRange || [availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);\n            else\n                x.range(xRange || [0, availableWidth]);\n\n            y.domain(yDomain || d3.extent(data[0].values.map(getY).concat(forceY) ))\n                .range(yRange || [availableHeight, 0]);\n\n            // If scale's domain don't have a range, slightly adjust to make one... 
so a chart can show a single data point\n            if (x.domain()[0] === x.domain()[1])\n                x.domain()[0] ?\n                    x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])\n                    : x.domain([-1,1]);\n\n            if (y.domain()[0] === y.domain()[1])\n                y.domain()[0] ?\n                    y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])\n                    : y.domain([-1,1]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-historicalBar-' + id).data([data[0].values]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-historicalBar-' + id);\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-bars');\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            container\n                .on('click', function(d,i) {\n                    dispatch.chartClick({\n                        data: d,\n                        index: i,\n                        pos: d3.event,\n                        id: id\n                    });\n                });\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-chart-clip-path-' + id)\n                .append('rect');\n\n            wrap.select('#nv-chart-clip-path-' + id + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            g.attr('clip-path', clipEdge ? 'url(#nv-chart-clip-path-' + id + ')' : '');\n\n            var bars = wrap.select('.nv-bars').selectAll('.nv-bar')\n                .data(function(d) { return d }, function(d,i) {return getX(d,i)});\n            bars.exit().remove();\n\n            bars.enter().append('rect')\n                .attr('x', 0 )\n                .attr('y', function(d,i) {  return nv.utils.NaNtoZero(y(Math.max(0, getY(d,i)))) })\n                .attr('height', function(d,i) { return nv.utils.NaNtoZero(Math.abs(y(getY(d,i)) - y(0))) })\n                .attr('transform', function(d,i) { return 'translate(' + (x(getX(d,i)) - availableWidth / data[0].values.length * .45) + ',0)'; })\n                .on('mouseover', function(d,i) {\n                    if (!interactive) return;\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n\n                })\n                .on('mouseout', function(d,i) {\n                    if (!interactive) return;\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mousemove', function(d,i) {\n                    if (!interactive) return;\n                    dispatch.elementMousemove({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('click', function(d,i) {\n                    if 
(!interactive) return;\n                    var element = this;\n                    dispatch.elementClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\"),\n                        event: d3.event,\n                        element: element\n                    });\n                    d3.event.stopPropagation();\n                })\n                .on('dblclick', function(d,i) {\n                    if (!interactive) return;\n                    dispatch.elementDblClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                    d3.event.stopPropagation();\n                });\n\n            bars\n                .attr('fill', function(d,i) { return color(d, i); })\n                .attr('class', function(d,i,j) { return (getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive') + ' nv-bar-' + j + '-' + i })\n                .watchTransition(renderWatch, 'bars')\n                .attr('transform', function(d,i) { return 'translate(' + (x(getX(d,i)) - availableWidth / data[0].values.length * .45) + ',0)'; })\n                //TODO: better width calculations that don't assume always uniform data spacing;w\n                .attr('width', (availableWidth / data[0].values.length) * .9 );\n\n            bars.watchTransition(renderWatch, 'bars')\n                .attr('y', function(d,i) {\n                    var rval = getY(d,i) < 0 ?\n                        y(0) :\n                            y(0) - y(getY(d,i)) < 1 ?\n                        y(0) - 1 :\n                        y(getY(d,i));\n                    return nv.utils.NaNtoZero(rval);\n                })\n                .attr('height', function(d,i) { return nv.utils.NaNtoZero(Math.max(Math.abs(y(getY(d,i)) - y(0)),1)) });\n\n        });\n\n        renderWatch.renderEnd('historicalBar immediate');\n        return chart;\n    }\n\n    //Create methods to allow outside functions to highlight a specific bar.\n    chart.highlightPoint = function(pointIndex, isHoverOver) {\n        container\n            .select(\".nv-bars .nv-bar-0-\" + pointIndex)\n            .classed(\"hover\", isHoverOver)\n        ;\n    };\n\n    chart.clearHighlights = function() {\n        container\n            .select(\".nv-bars .nv-bar.hover\")\n            .classed(\"hover\", false)\n        ;\n    };\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:   {get: function(){return width;}, set: function(_){width=_;}},\n        height:  {get: function(){return height;}, set: function(_){height=_;}},\n        forceX:  {get: function(){return forceX;}, set: function(_){forceX=_;}},\n        forceY:  {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        padData: {get: function(){return padData;}, set: function(_){padData=_;}},\n        x:       {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:       {get: function(){return getY;}, set: function(_){getY=_;}},\n        xScale:  {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:  {get: function(){return y;}, set: 
function(_){y=_;}},\n        xDomain: {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain: {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:  {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:  {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        clipEdge:    {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        id:          {get: function(){return id;}, set: function(_){id=_;}},\n        interactive: {get: function(){return interactive;}, set: function(_){interactive=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.historicalBarChart = function(bar_model) {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var bars = bar_model || nv.models.historicalBar()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , legend = nv.models.legend()\n        , interactiveLayer = nv.interactiveGuideline()\n        , tooltip = nv.models.tooltip()\n        ;\n\n\n    var margin = {top: 30, right: 90, bottom: 50, left: 90}\n        , marginTop = null\n        , color = nv.utils.defaultColor()\n        , width = null\n        , height = null\n        , showLegend = false\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , useInteractiveGuideline = false\n        , x\n        , y\n        , state = {}\n        , defaultState = null\n        , noData = null\n        , dispatch = d3.dispatch('tooltipHide', 'stateChange', 'changeState', 'renderEnd')\n        , transitionDuration = 250\n        ;\n\n    xAxis.orient('bottom').tickPadding(7);\n    yAxis.orient( (rightAlignYAxis) ? 
'right' : 'left');\n    tooltip\n        .duration(0)\n        .headerEnabled(false)\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch, 0);\n\n    function chart(selection) {\n        selection.each(function(data) {\n            renderWatch.reset();\n            renderWatch.models(bars);\n            if (showXAxis) renderWatch.models(xAxis);\n            if (showYAxis) renderWatch.models(yAxis);\n\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() { container.transition().duration(transitionDuration).call(chart) };\n            chart.container = this;\n\n            //set state.disabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display noData message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = bars.xScale();\n            y = bars.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-historicalBarChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-historicalBarChart').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis');\n            gEnter.append('g').attr('class', 'nv-barsWrap');\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n            gEnter.append('g').attr('class', 'nv-interactive');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth);\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                wrap.select('.nv-legendWrap')\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n            }\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n      
      if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            //Set up interactive layer\n            if (useInteractiveGuideline) {\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left:margin.left, top:margin.top})\n                    .svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n            bars\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled }));\n\n            var barsWrap = g.select('.nv-barsWrap')\n                .datum(data.filter(function(d) { return !d.disabled }));\n            barsWrap.transition().call(bars);\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n\n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')');\n                g.select('.nv-x.nv-axis')\n                    .transition()\n                    .call(xAxis);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis')\n                    .transition()\n                    .call(yAxis);\n            }\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                bars.clearHighlights();\n\n                var singlePoint, pointIndex, pointXLocation, allData = [];\n                data\n                    .filter(function(series, i) {\n                        series.seriesIndex = i;\n                        return !series.disabled;\n                    })\n                    .forEach(function(series,i) {\n                        pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());\n                        bars.highlightPoint(pointIndex,true);\n                        var point = series.values[pointIndex];\n                        if (point === undefined) return;\n                        if (singlePoint === undefined) singlePoint = point;\n                        if (pointXLocation === undefined) pointXLocation = chart.xScale()(chart.x()(point,pointIndex));\n                        allData.push({\n                            key: series.key,\n                            value: chart.y()(point, pointIndex),\n                            color: color(series,series.seriesIndex),\n                            data: series.values[pointIndex]\n                        });\n                    });\n\n                var xValue = xAxis.tickFormat()(chart.x()(singlePoint,pointIndex));\n                interactiveLayer.tooltip\n                    
.valueFormatter(function(d,i) {\n                        return yAxis.tickFormat()(d);\n                    })\n                    .data({\n                        value: xValue,\n                        index: pointIndex,\n                        series: allData\n                    })();\n\n                interactiveLayer.renderGuideLine(pointXLocation);\n\n            });\n\n            interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                dispatch.tooltipHide();\n                bars.clearHighlights();\n            });\n\n            legend.dispatch.on('legendClick', function(d,i) {\n                d.disabled = !d.disabled;\n\n                if (!data.filter(function(d) { return !d.disabled }).length) {\n                    data.map(function(d) {\n                        d.disabled = false;\n                        wrap.selectAll('.nv-series').classed('disabled', false);\n                        return d;\n                    });\n                }\n\n                state.disabled = data.map(function(d) { return !!d.disabled });\n                dispatch.stateChange(state);\n\n                selection.transition().call(chart);\n            });\n\n            legend.dispatch.on('legendDblclick', function(d) {\n                //Double clicking should always enable current series, and disabled all others.\n                data.forEach(function(d) {\n                    d.disabled = true;\n                });\n                d.disabled = false;\n\n                state.disabled = data.map(function(d) { return !!d.disabled });\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n\n                    state.disabled = e.disabled;\n                }\n\n                chart.update();\n            });\n        });\n\n        renderWatch.renderEnd('historicalBarChart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    bars.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt['series'] = {\n            key: chart.x()(evt.data),\n            value: chart.y()(evt.data),\n            color: evt.color\n        };\n        tooltip.data(evt).hidden(false);\n    });\n\n    bars.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    bars.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.bars = bars;\n    chart.legend = legend;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.interactiveLayer = interactiveLayer;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return 
height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        showXAxis: {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis: {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n            bars.color(color);\n        }},\n        duration:    {get: function(){return transitionDuration;}, set: function(_){\n            transitionDuration=_;\n            renderWatch.reset(transitionDuration);\n            yAxis.duration(transitionDuration);\n            xAxis.duration(transitionDuration);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( (_) ? 'right' : 'left');\n        }},\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = _;\n            if (_ === true) {\n                chart.interactive(false);\n            }\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, bars);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n\n\n// ohlcChart is just a historical chart with ohlc bars and some tweaks\nnv.models.ohlcBarChart = function() {\n    var chart = nv.models.historicalBarChart(nv.models.ohlcBar());\n\n    // special default tooltip since we show multiple values per x\n    chart.useInteractiveGuideline(true);\n    chart.interactiveLayer.tooltip.contentGenerator(function(data) {\n        // we assume only one series exists for this chart\n        var d = data.series[0].data;\n        // match line colors as defined in nv.d3.css\n        var color = d.open < d.close ? 
\"2ca02c\" : \"d62728\";\n        return '' +\n            '<h3 style=\"color: #' + color + '\">' + data.value + '</h3>' +\n            '<table>' +\n            '<tr><td>open:</td><td>' + chart.yAxis.tickFormat()(d.open) + '</td></tr>' +\n            '<tr><td>close:</td><td>' + chart.yAxis.tickFormat()(d.close) + '</td></tr>' +\n            '<tr><td>high</td><td>' + chart.yAxis.tickFormat()(d.high) + '</td></tr>' +\n            '<tr><td>low:</td><td>' + chart.yAxis.tickFormat()(d.low) + '</td></tr>' +\n            '</table>';\n    });\n    return chart;\n};\n\n// candlestickChart is just a historical chart with candlestick bars and some tweaks\nnv.models.candlestickBarChart = function() {\n    var chart = nv.models.historicalBarChart(nv.models.candlestickBar());\n\n    // special default tooltip since we show multiple values per x\n    chart.useInteractiveGuideline(true);\n    chart.interactiveLayer.tooltip.contentGenerator(function(data) {\n        // we assume only one series exists for this chart\n        var d = data.series[0].data;\n        // match line colors as defined in nv.d3.css\n        var color = d.open < d.close ? \"2ca02c\" : \"d62728\";\n        return '' +\n            '<h3 style=\"color: #' + color + '\">' + data.value + '</h3>' +\n            '<table>' +\n            '<tr><td>open:</td><td>' + chart.yAxis.tickFormat()(d.open) + '</td></tr>' +\n            '<tr><td>close:</td><td>' + chart.yAxis.tickFormat()(d.close) + '</td></tr>' +\n            '<tr><td>high</td><td>' + chart.yAxis.tickFormat()(d.high) + '</td></tr>' +\n            '<tr><td>low:</td><td>' + chart.yAxis.tickFormat()(d.low) + '</td></tr>' +\n            '</table>';\n    });\n    return chart;\n};\n","nv.models.legend = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 5, right: 0, bottom: 5, left: 0}\n        , width = 400\n        , height = 20\n        , getKey = function(d) { return d.key }\n        , keyFormatter = function (d) { return d }\n        , color = nv.utils.getColor()\n        , maxKeyLength = 20 //default value for key lengths\n        , align = true\n        , padding = 32 //define how much space between legend items. - recommend 32 for furious version\n        , rightAlign = true\n        , updateState = true   //If true, legend will update data.disabled and trigger a 'stateChange' dispatch.\n        , radioButtonMode = false   //If true, clicking legend items will cause it to behave like a radio button. 
(only one can be selected at a time)\n        , expanded = false\n        , dispatch = d3.dispatch('legendClick', 'legendDblclick', 'legendMouseover', 'legendMouseout', 'stateChange')\n        , vers = 'classic' //Options are \"classic\" and \"furious\"\n        ;\n\n    function chart(selection) {\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-legend').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-legend').append('g');\n            var g = wrap.select('g');\n\n            if (rightAlign)\n                wrap.attr('transform', 'translate(' + (- margin.right) + ',' + margin.top + ')');\n            else\n                wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var series = g.selectAll('.nv-series')\n                .data(function(d) {\n                    if(vers != 'furious') return d;\n\n                    return d.filter(function(n) {\n                        return expanded ? true : !n.disengaged;\n                    });\n                });\n\n            var seriesEnter = series.enter().append('g').attr('class', 'nv-series');\n            var seriesShape;\n\n            var versPadding;\n            switch(vers) {\n                case 'furious' :\n                    versPadding = 23;\n                    break;\n                case 'classic' :\n                    versPadding = 20;\n            }\n\n            if(vers == 'classic') {\n                seriesEnter.append('circle')\n                    .style('stroke-width', 2)\n                    .attr('class','nv-legend-symbol')\n                    .attr('r', 5);\n\n                seriesShape = series.select('.nv-legend-symbol');\n            } else if (vers == 'furious') {\n                seriesEnter.append('rect')\n                    .style('stroke-width', 2)\n                    .attr('class','nv-legend-symbol')\n                    .attr('rx', 3)\n                    .attr('ry', 3);\n                seriesShape = series.select('.nv-legend-symbol');\n\n                seriesEnter.append('g')\n                    .attr('class', 'nv-check-box')\n                    .property('innerHTML','<path d=\"M0.5,5 L22.5,5 L22.5,26.5 L0.5,26.5 L0.5,5 Z\" class=\"nv-box\"></path><path d=\"M5.5,12.8618467 L11.9185089,19.2803556 L31,0.198864511\" class=\"nv-check\"></path>')\n                    .attr('transform', 'translate(-10,-8)scale(0.5)');\n\n                var seriesCheckbox = series.select('.nv-check-box');\n\n                seriesCheckbox.each(function(d,i) {\n                    d3.select(this).selectAll('path')\n                        .attr('stroke', setTextColor(d,i));\n                });\n            }\n\n            seriesEnter.append('text')\n                .attr('text-anchor', 'start')\n                .attr('class','nv-legend-text')\n                .attr('dy', '.32em')\n                .attr('dx', '8');\n\n            var seriesText = series.select('text.nv-legend-text');\n\n            series\n                .on('mouseover', function(d,i) {\n                    dispatch.legendMouseover(d,i);  //TODO: Make consistent with other event objects\n                })\n                .on('mouseout', function(d,i) {\n                    dispatch.legendMouseout(d,i);\n        
        })\n                .on('click', function(d,i) {\n                    dispatch.legendClick(d,i);\n                    // make sure we re-get data in case it was modified\n                    var data = series.data();\n                    if (updateState) {\n                        if(vers =='classic') {\n                            if (radioButtonMode) {\n                                //Radio button mode: set every series to disabled,\n                                //  and enable the clicked series.\n                                data.forEach(function(series) { series.disabled = true});\n                                d.disabled = false;\n                            }\n                            else {\n                                d.disabled = !d.disabled;\n                                if (data.every(function(series) { return series.disabled})) {\n                                    //the default behavior of NVD3 legends is, if every single series\n                                    // is disabled, turn all series' back on.\n                                    data.forEach(function(series) { series.disabled = false});\n                                }\n                            }\n                        } else if(vers == 'furious') {\n                            if(expanded) {\n                                d.disengaged = !d.disengaged;\n                                d.userDisabled = d.userDisabled == undefined ? !!d.disabled : d.userDisabled;\n                                d.disabled = d.disengaged || d.userDisabled;\n                            } else if (!expanded) {\n                                d.disabled = !d.disabled;\n                                d.userDisabled = d.disabled;\n                                var engaged = data.filter(function(d) { return !d.disengaged; });\n                                if (engaged.every(function(series) { return series.userDisabled })) {\n                                    //the default behavior of NVD3 legends is, if every single series\n                                    // is disabled, turn all series' back on.\n                                    data.forEach(function(series) {\n                                        series.disabled = series.userDisabled = false;\n                                    });\n                                }\n                            }\n                        }\n                        dispatch.stateChange({\n                            disabled: data.map(function(d) { return !!d.disabled }),\n                            disengaged: data.map(function(d) { return !!d.disengaged })\n                        });\n\n                    }\n                })\n                .on('dblclick', function(d,i) {\n                    if(vers == 'furious' && expanded) return;\n                    dispatch.legendDblclick(d,i);\n                    if (updateState) {\n                        // make sure we re-get data in case it was modified\n                        var data = series.data();\n                        //the default behavior of NVD3 legends, when double clicking one,\n                        // is to set all other series' to false, and make the double clicked series enabled.\n                        data.forEach(function(series) {\n                            series.disabled = true;\n                            if(vers == 'furious') series.userDisabled = series.disabled;\n                        });\n                        d.disabled = false;\n                        
if(vers == 'furious') d.userDisabled = d.disabled;\n                        dispatch.stateChange({\n                            disabled: data.map(function(d) { return !!d.disabled })\n                        });\n                    }\n                });\n\n            series.classed('nv-disabled', function(d) { return d.userDisabled });\n            series.exit().remove();\n\n            seriesText\n                .attr('fill', setTextColor)\n                .text(function (d) { return keyFormatter(getKey(d)) });\n\n            //TODO: implement fixed-width and max-width options (max-width is especially useful with the align option)\n            // NEW ALIGNING CODE, TODO: clean up\n            var legendWidth = 0;\n            if (align) {\n\n                var seriesWidths = [];\n                series.each(function(d,i) {\n                    var legendText;\n                    if (keyFormatter(getKey(d)) && keyFormatter(getKey(d)).length > maxKeyLength) {\n                        var trimmedKey = keyFormatter(getKey(d)).substring(0, maxKeyLength);\n                        legendText = d3.select(this).select('text').text(trimmedKey + \"...\");\n                        d3.select(this).append(\"svg:title\").text(keyFormatter(getKey(d)));\n                    } else {\n                        legendText = d3.select(this).select('text');\n                    }\n                    var nodeTextLength;\n                    try {\n                        nodeTextLength = legendText.node().getComputedTextLength();\n                        // If the legendText is display:none'd (nodeTextLength == 0), simulate an error so we approximate, instead\n                        if(nodeTextLength <= 0) throw Error();\n                    }\n                    catch(e) {\n                        nodeTextLength = nv.utils.calcApproxTextWidth(legendText);\n                    }\n\n                    seriesWidths.push(nodeTextLength + padding);\n                });\n\n                var seriesPerRow = 0;\n                var columnWidths = [];\n                legendWidth = 0;\n\n                while ( legendWidth < availableWidth && seriesPerRow < seriesWidths.length) {\n                    columnWidths[seriesPerRow] = seriesWidths[seriesPerRow];\n                    legendWidth += seriesWidths[seriesPerRow++];\n                }\n                if (seriesPerRow === 0) seriesPerRow = 1; //minimum of one series per row\n\n                while ( legendWidth > availableWidth && seriesPerRow > 1 ) {\n                    columnWidths = [];\n                    seriesPerRow--;\n\n                    for (var k = 0; k < seriesWidths.length; k++) {\n                        if (seriesWidths[k] > (columnWidths[k % seriesPerRow] || 0) )\n                            columnWidths[k % seriesPerRow] = seriesWidths[k];\n                    }\n\n                    legendWidth = columnWidths.reduce(function(prev, cur, index, array) {\n                        return prev + cur;\n                    });\n                }\n\n                var xPositions = [];\n                for (var i = 0, curX = 0; i < seriesPerRow; i++) {\n                    xPositions[i] = curX;\n                    curX += columnWidths[i];\n                }\n\n                series\n                    .attr('transform', function(d, i) {\n                        return 'translate(' + xPositions[i % seriesPerRow] + ',' + (5 + Math.floor(i / seriesPerRow) * versPadding) + ')';\n                    });\n\n                //position legend 
as far right as possible within the total width\n                if (rightAlign) {\n                    g.attr('transform', 'translate(' + (width - margin.right - legendWidth) + ',' + margin.top + ')');\n                }\n                else {\n                    g.attr('transform', 'translate(0' + ',' + margin.top + ')');\n                }\n\n                height = margin.top + margin.bottom + (Math.ceil(seriesWidths.length / seriesPerRow) * versPadding);\n\n            } else {\n\n                var ypos = 5,\n                    newxpos = 5,\n                    maxwidth = 0,\n                    xpos;\n                series\n                    .attr('transform', function(d, i) {\n                        var length = d3.select(this).select('text').node().getComputedTextLength() + padding;\n                        xpos = newxpos;\n\n                        if (width < margin.left + margin.right + xpos + length) {\n                            newxpos = xpos = 5;\n                            ypos += versPadding;\n                        }\n\n                        newxpos += length;\n                        if (newxpos > maxwidth) maxwidth = newxpos;\n\n                        if(legendWidth < xpos + maxwidth) {\n                            legendWidth = xpos + maxwidth;\n                        }\n                        return 'translate(' + xpos + ',' + ypos + ')';\n                    });\n\n                //position legend as far right as possible within the total width\n                g.attr('transform', 'translate(' + (width - margin.right - maxwidth) + ',' + margin.top + ')');\n\n                height = margin.top + margin.bottom + ypos + 15;\n            }\n\n            if(vers == 'furious') {\n                // Size rectangles after text is placed\n                seriesShape\n                    .attr('width', function(d,i) {\n                        return seriesText[0][i].getComputedTextLength() + 27;\n                    })\n                    .attr('height', 18)\n                    .attr('y', -9)\n                    .attr('x', -15);\n\n                // The background for the expanded legend (UI)\n                gEnter.insert('rect',':first-child')\n                    .attr('class', 'nv-legend-bg')\n                    .attr('fill', '#eee')\n                    // .attr('stroke', '#444')\n                    .attr('opacity',0);\n\n                var seriesBG = g.select('.nv-legend-bg');\n\n                seriesBG\n                .transition().duration(300)\n                    .attr('x', -versPadding )\n                    .attr('width', legendWidth + versPadding - 12)\n                    .attr('height', height + 10)\n                    .attr('y', -margin.top - 10)\n                    .attr('opacity', expanded ? 1 : 0);\n\n\n            }\n\n            seriesShape\n                .style('fill', setBGColor)\n                .style('fill-opacity', setBGOpacity)\n                .style('stroke', setBGColor);\n        });\n\n        function setTextColor(d,i) {\n            if(vers != 'furious') return '#000';\n            if(expanded) {\n                return d.disengaged ? '#000' : '#fff';\n            } else if (!expanded) {\n                if(!d.color) d.color = color(d,i);\n                return !!d.disabled ? d.color : '#fff';\n            }\n        }\n\n        function setBGColor(d,i) {\n            if(expanded && vers == 'furious') {\n                return d.disengaged ? 
'#eee' : d.color || color(d,i);\n            } else {\n                return d.color || color(d,i);\n            }\n        }\n\n\n        function setBGOpacity(d,i) {\n            if(expanded && vers == 'furious') {\n                return 1;\n            } else {\n                return !!d.disabled ? 0 : 1;\n            }\n        }\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:          {get: function(){return width;}, set: function(_){width=_;}},\n        height:         {get: function(){return height;}, set: function(_){height=_;}},\n        key:            {get: function(){return getKey;}, set: function(_){getKey=_;}},\n        keyFormatter:   {get: function(){return keyFormatter;}, set: function(_){keyFormatter=_;}},\n        align:          {get: function(){return align;}, set: function(_){align=_;}},\n        maxKeyLength:   {get: function(){return maxKeyLength;}, set: function(_){maxKeyLength=_;}},\n        rightAlign:     {get: function(){return rightAlign;}, set: function(_){rightAlign=_;}},\n        padding:        {get: function(){return padding;}, set: function(_){padding=_;}},\n        updateState:    {get: function(){return updateState;}, set: function(_){updateState=_;}},\n        radioButtonMode:{get: function(){return radioButtonMode;}, set: function(_){radioButtonMode=_;}},\n        expanded:       {get: function(){return expanded;}, set: function(_){expanded=_;}},\n        vers:           {get: function(){return vers;}, set: function(_){vers=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.line = function() {\n    \"use strict\";\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var  scatter = nv.models.scatter()\n        ;\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 960\n        , height = 500\n        , container = null\n        , strokeWidth = 1.5\n        , color = nv.utils.defaultColor() // a function that returns a color\n        , getX = function(d) { return d.x } // accessor to get the x value from a data point\n        , getY = function(d) { return d.y } // accessor to get the y value from a data point\n        , defined = function(d,i) { return !isNaN(getY(d,i)) && getY(d,i) !== null } // allows a line to be not continuous when it is not defined\n        , isArea = function(d) { return d.area } // decides if a line is an area or just a line\n        , clipEdge = false // if true, masks lines within x and y scale\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , interpolate = \"linear\" // controls the line interpolation\n        , duration = 250\n        , dispatch = d3.dispatch('elementClick', 'elementMouseover', 'elementMouseout', 'renderEnd')\n        ;\n\n    scatter\n        .pointSize(16) // default size\n        .pointDomain([16,256]) //set to speed up calculation, needs to be unset if there is a custom size accessor\n    ;\n\n    //============================================================\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0 //used to store previous scales\n        , renderWatch = nv.utils.renderWatch(dispatch, duration)\n        ;\n\n    //============================================================\n\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(scatter);\n        selection.each(function(data) {\n            container = d3.select(this);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n            nv.utils.initSVG(container);\n\n            // Setup Scales\n            x = scatter.xScale();\n            y = scatter.yScale();\n\n            x0 = x0 || x;\n            y0 = y0 || y;\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-line').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-line');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-groups');\n            gEnter.append('g').attr('class', 'nv-scatterWrap');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            scatter\n                .width(availableWidth)\n                .height(availableHeight);\n\n            var scatterWrap = wrap.select('.nv-scatterWrap');\n            
scatterWrap.call(scatter);\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-edge-clip-' + scatter.id())\n                .append('rect');\n\n            wrap.select('#nv-edge-clip-' + scatter.id() + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', (availableHeight > 0) ? availableHeight : 0);\n\n            g   .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + scatter.id() + ')' : '');\n            scatterWrap\n                .attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + scatter.id() + ')' : '');\n\n            var groups = wrap.select('.nv-groups').selectAll('.nv-group')\n                .data(function(d) { return d }, function(d) { return d.key });\n            groups.enter().append('g')\n                .style('stroke-opacity', 1e-6)\n                .style('stroke-width', function(d) { return d.strokeWidth || strokeWidth })\n                .style('fill-opacity', 1e-6);\n\n            groups.exit().remove();\n\n            groups\n                .attr('class', function(d,i) {\n                    return (d.classed || '') + ' nv-group nv-series-' + i;\n                })\n                .classed('hover', function(d) { return d.hover })\n                .style('fill', function(d,i){ return color(d, i) })\n                .style('stroke', function(d,i){ return color(d, i)});\n            groups.watchTransition(renderWatch, 'line: groups')\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', function(d) { return d.fillOpacity || .5});\n\n            var areaPaths = groups.selectAll('path.nv-area')\n                .data(function(d) { return isArea(d) ? [d] : [] }); // this is done differently than lines because I need to check if series is an area\n            areaPaths.enter().append('path')\n                .attr('class', 'nv-area')\n                .attr('d', function(d) {\n                    return d3.svg.area()\n                        .interpolate(interpolate)\n                        .defined(defined)\n                        .x(function(d,i) { return nv.utils.NaNtoZero(x0(getX(d,i))) })\n                        .y0(function(d,i) { return nv.utils.NaNtoZero(y0(getY(d,i))) })\n                        .y1(function(d,i) { return y0( y.domain()[0] <= 0 ? y.domain()[1] >= 0 ? 0 : y.domain()[1] : y.domain()[0] ) })\n                        //.y1(function(d,i) { return y0(0) }) //assuming 0 is within y domain.. may need to tweak this\n                        .apply(this, [d.values])\n                });\n            groups.exit().selectAll('path.nv-area')\n                .remove();\n\n            areaPaths.watchTransition(renderWatch, 'line: areaPaths')\n                .attr('d', function(d) {\n                    return d3.svg.area()\n                        .interpolate(interpolate)\n                        .defined(defined)\n                        .x(function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })\n                        .y0(function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })\n                        .y1(function(d,i) { return y( y.domain()[0] <= 0 ? y.domain()[1] >= 0 ? 0 : y.domain()[1] : y.domain()[0] ) })\n                        //.y1(function(d,i) { return y0(0) }) //assuming 0 is within y domain.. 
may need to tweak this\n                        .apply(this, [d.values])\n                });\n\n            var linePaths = groups.selectAll('path.nv-line')\n                .data(function(d) { return [d.values] });\n\n            linePaths.enter().append('path')\n                .attr('class', 'nv-line')\n                .attr('d',\n                    d3.svg.line()\n                    .interpolate(interpolate)\n                    .defined(defined)\n                    .x(function(d,i) { return nv.utils.NaNtoZero(x0(getX(d,i))) })\n                    .y(function(d,i) { return nv.utils.NaNtoZero(y0(getY(d,i))) })\n            );\n\n            linePaths.watchTransition(renderWatch, 'line: linePaths')\n                .attr('d',\n                    d3.svg.line()\n                    .interpolate(interpolate)\n                    .defined(defined)\n                    .x(function(d,i) { return nv.utils.NaNtoZero(x(getX(d,i))) })\n                    .y(function(d,i) { return nv.utils.NaNtoZero(y(getY(d,i))) })\n            );\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n        });\n        renderWatch.renderEnd('line immediate');\n        return chart;\n    }\n\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.scatter = scatter;\n    // Pass through events\n    scatter.dispatch.on('elementClick', function(){ dispatch.elementClick.apply(this, arguments); });\n    scatter.dispatch.on('elementMouseover', function(){ dispatch.elementMouseover.apply(this, arguments); });\n    scatter.dispatch.on('elementMouseout', function(){ dispatch.elementMouseout.apply(this, arguments); });\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        defined: {get: function(){return defined;}, set: function(_){defined=_;}},\n        interpolate:      {get: function(){return interpolate;}, set: function(_){interpolate=_;}},\n        clipEdge:    {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            scatter.duration(duration);\n        }},\n        isArea: {get: function(){return isArea;}, set: function(_){\n            isArea = d3.functor(_);\n        }},\n        x: {get: function(){return getX;}, set: function(_){\n            getX = _;\n            scatter.x(_);\n        }},\n        y: {get: function(){return getY;}, set: function(_){\n            getY = _;\n            scatter.y(_);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            scatter.color(color);\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, scatter);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","nv.models.lineChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var lines = nv.models.line()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , legend = nv.models.legend()\n        , interactiveLayer = nv.interactiveGuideline()\n        , tooltip = nv.models.tooltip()\n        , focus = nv.models.focus(nv.models.line())\n        ;\n\n    var margin = {top: 30, right: 20, bottom: 50, left: 60}\n        , marginTop = null\n        , color = nv.utils.defaultColor()\n        , width = null\n        , height = null\n        , showLegend = true\n        , legendPosition = 'top'\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , useInteractiveGuideline = false\n        , x\n        , y\n        , focusEnable = false\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , dispatch = d3.dispatch('tooltipShow', 'tooltipHide', 'stateChange', 'changeState', 'renderEnd')\n        , duration = 250\n        ;\n\n    // set options on sub-objects for this chart\n    xAxis.orient('bottom').tickPadding(7);\n    yAxis.orient(rightAlignYAxis ? 
'right' : 'left');\n\n    lines.clipEdge(true).duration(0);\n\n    tooltip.valueFormatter(function(d, i) {\n        return yAxis.tickFormat()(d, i);\n    }).headerFormatter(function(d, i) {\n        return xAxis.tickFormat()(d, i);\n    });\n\n    interactiveLayer.tooltip.valueFormatter(function(d, i) {\n        return yAxis.tickFormat()(d, i);\n    }).headerFormatter(function(d, i) {\n        return xAxis.tickFormat()(d, i);\n    });\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled; })\n            };\n        };\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        };\n    };\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(lines);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? focus.height() : 0);\n            chart.update = function() {\n                if( duration === 0 ) {\n                    container.call( chart );\n                } else {\n                    container.transition().duration(duration).call(chart);\n                }\n            };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disabled\n            state.disabled = data.map(function(d) { return !!d.disabled; });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display noData message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length; }).length) {\n                nv.utils.noData(chart, container);\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            /* Update `main' graph on brush update. 
*/\n            focus.dispatch.on(\"onBrush\", function(extent) {\n                onBrush(extent);\n            });\n\n            // Setup Scales\n            x = lines.xScale();\n            y = lines.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-lineChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-lineChart').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n\n            var focusEnter = gEnter.append('g').attr('class', 'nv-focus');\n            focusEnter.append('g').attr('class', 'nv-background').append('rect');\n            focusEnter.append('g').attr('class', 'nv-x nv-axis');\n            focusEnter.append('g').attr('class', 'nv-y nv-axis');\n            focusEnter.append('g').attr('class', 'nv-linesWrap');\n            focusEnter.append('g').attr('class', 'nv-interactive');\n\n            var contextEnter = gEnter.append('g').attr('class', 'nv-focusWrap');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth);\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (legendPosition === 'bottom') {\n                    wrap.select('.nv-legendWrap')\n                        .attr('transform', 'translate(0,' + availableHeight +')');\n                } else if (legendPosition === 'top') {\n                    if (!marginTop && legend.height() !== margin.top) {\n                        margin.top = legend.height();\n                        availableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? 
focus.height() : 0);\n                    }\n\n                    wrap.select('.nv-legendWrap')\n                        .attr('transform', 'translate(0,' + (-margin.top) +')');\n                }\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            //Set up interactive layer\n            if (useInteractiveGuideline) {\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left:margin.left, top:margin.top})\n                    .svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n\n            g.select('.nv-focus .nv-background rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            lines\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled; }));\n\n            var linesWrap = g.select('.nv-linesWrap')\n                .datum(data.filter(function(d) { return !d.disabled; }));\n\n\n            // Setup Main (Focus) Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks(nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n            }\n\n            //============================================================\n            // Update Axes\n            //============================================================\n            function updateXAxis() {\n              if(showXAxis) {\n                g.select('.nv-focus .nv-x.nv-axis')\n                  .transition()\n                  .duration(duration)\n                  .call(xAxis)\n                ;\n              }\n            }\n\n            function updateYAxis() {\n              if(showYAxis) {\n                g.select('.nv-focus .nv-y.nv-axis')\n                  .transition()\n                  .duration(duration)\n                  .call(yAxis)\n                ;\n              }\n            }\n\n            g.select('.nv-focus .nv-x.nv-axis')\n                .attr('transform', 'translate(0,' + availableHeight + ')');\n\n            //============================================================\n            // Update Focus\n            //============================================================\n            if(!focusEnable) {\n                linesWrap.call(lines);\n                updateXAxis();\n                updateYAxis();\n            } else {\n                focus.width(availableWidth);\n                g.select('.nv-focusWrap')\n                    .attr('transform', 'translate(0,' + ( availableHeight + margin.bottom + focus.margin().top) + ')')\n                    .datum(data.filter(function(d) { return !d.disabled; }))\n                    .call(focus);\n                var 
extent = focus.brush.empty() ? focus.xDomain() : focus.brush.extent();\n                if(extent !== null){\n                    onBrush(extent);\n                }\n            }\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                lines.clearHighlights();\n                var singlePoint, pointIndex, pointXLocation, allData = [];\n                data\n                    .filter(function(series, i) {\n                        series.seriesIndex = i;\n                        return !series.disabled && !series.disableTooltip;\n                    })\n                    .forEach(function(series,i) {\n                        var extent = focusEnable ? (focus.brush.empty() ? focus.xScale().domain() : focus.brush.extent()) : x.domain();\n                        var currentValues = series.values.filter(function(d,i) {\n                            // Checks if the x point is between the extents, handling case where extent[0] is greater than extent[1]\n                            // (e.g. x domain is manually set to reverse the x-axis)\n                            if(extent[0] <= extent[1]) {\n                                return lines.x()(d,i) >= extent[0] && lines.x()(d,i) <= extent[1];\n                            } else {\n                                return lines.x()(d,i) >= extent[1] && lines.x()(d,i) <= extent[0];\n                            }\n                        });\n\n                        pointIndex = nv.interactiveBisect(currentValues, e.pointXValue, lines.x());\n                        var point = currentValues[pointIndex];\n                        var pointYValue = chart.y()(point, pointIndex);\n                        if (pointYValue !== null) {\n                            lines.highlightPoint(i, pointIndex, true);\n                        }\n                        if (point === undefined) return;\n                        if (singlePoint === undefined) singlePoint = point;\n                        if (pointXLocation === undefined) pointXLocation = chart.xScale()(chart.x()(point,pointIndex));\n                        allData.push({\n                            key: series.key,\n                            value: pointYValue,\n                            color: color(series,series.seriesIndex),\n                            data: point\n                        });\n                    });\n                //Highlight the tooltip entry based on which point the mouse is closest to.\n                if (allData.length > 2) {\n                    var yValue = chart.yScale().invert(e.mouseY);\n                    var domainExtent = Math.abs(chart.yScale().domain()[0] - chart.yScale().domain()[1]);\n                    var threshold = 0.03 * domainExtent;\n                    var indexToHighlight = nv.nearestValueIndex(allData.map(function(d){return d.value;}),yValue,threshold);\n                    if (indexToHighlight !== null)\n                        allData[indexToHighlight].highlight = true;\n                }\n\n                var defaultValueFormatter = 
function(d,i) {\n                    return d == null ? \"N/A\" : yAxis.tickFormat()(d);\n                };\n\n                interactiveLayer.tooltip\n                    .valueFormatter(interactiveLayer.tooltip.valueFormatter() || defaultValueFormatter)\n                    .data({\n                        value: chart.x()( singlePoint,pointIndex ),\n                        index: pointIndex,\n                        series: allData\n                    })();\n\n                interactiveLayer.renderGuideLine(pointXLocation);\n\n            });\n\n            interactiveLayer.dispatch.on('elementClick', function(e) {\n                var pointXLocation, allData = [];\n\n                data.filter(function(series, i) {\n                    series.seriesIndex = i;\n                    return !series.disabled;\n                }).forEach(function(series) {\n                    var pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());\n                    var point = series.values[pointIndex];\n                    if (typeof point === 'undefined') return;\n                    if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));\n                    var yPos = chart.yScale()(chart.y()(point,pointIndex));\n                    allData.push({\n                        point: point,\n                        pointIndex: pointIndex,\n                        pos: [pointXLocation, yPos],\n                        seriesIndex: series.seriesIndex,\n                        series: series\n                    });\n                });\n\n                lines.dispatch.elementClick(allData);\n            });\n\n            interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                lines.clearHighlights();\n            });\n\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined' && data.length === e.disabled.length) {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n\n                    state.disabled = e.disabled;\n                }\n                chart.update();\n            });\n\n            //============================================================\n            // Functions\n            //------------------------------------------------------------\n\n            // Taken from crossfilter (http://square.github.com/crossfilter/)\n            function resizePath(d) {\n                var e = +(d == 'e'),\n                    x = e ? 
1 : -1,\n                    y = availableHeight / 3;\n                return 'M' + (0.5 * x) + ',' + y\n                    + 'A6,6 0 0 ' + e + ' ' + (6.5 * x) + ',' + (y + 6)\n                    + 'V' + (2 * y - 6)\n                    + 'A6,6 0 0 ' + e + ' ' + (0.5 * x) + ',' + (2 * y)\n                    + 'Z'\n                    + 'M' + (2.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8)\n                    + 'M' + (4.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8);\n            }\n\n            function onBrush(extent) {\n                // Update Main (Focus)\n                var focusLinesWrap = g.select('.nv-focus .nv-linesWrap')\n                    .datum(\n                    data.filter(function(d) { return !d.disabled; })\n                        .map(function(d,i) {\n                            return {\n                                key: d.key,\n                                area: d.area,\n                                classed: d.classed,\n                                values: d.values.filter(function(d,i) {\n                                    return lines.x()(d,i) >= extent[0] && lines.x()(d,i) <= extent[1];\n                                }),\n                                disableTooltip: d.disableTooltip\n                            };\n                        })\n                );\n                focusLinesWrap.transition().duration(duration).call(lines);\n\n                // Update Main (Focus) Axes\n                updateXAxis();\n                updateYAxis();\n            }\n        });\n\n        renderWatch.renderEnd('lineChart immediate');\n        return chart;\n    }\n\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    lines.dispatch.on('elementMouseover.tooltip', function(evt) {\n        if(!evt.series.disableTooltip){\n            tooltip.data(evt).hidden(false);\n        }\n    });\n\n    lines.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.lines = lines;\n    chart.legend = legend;\n    chart.focus = focus;\n    chart.xAxis = xAxis;\n    chart.x2Axis = focus.xAxis\n    chart.yAxis = yAxis;\n    chart.y2Axis = focus.yAxis\n    chart.interactiveLayer = interactiveLayer;\n    chart.tooltip = tooltip;\n    chart.state = state;\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        legendPosition: {get: function(){return legendPosition;}, set: function(_){legendPosition=_;}},\n        showXAxis:      {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        
noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        // Focus options, mostly passed onto focus model.\n        focusEnable:    {get: function(){return focusEnable;}, set: function(_){focusEnable=_;}},\n        focusHeight:     {get: function(){return focus.height();}, set: function(_){focus.height(_);}},\n        focusShowAxisX:    {get: function(){return focus.showXAxis();}, set: function(_){focus.showXAxis(_);}},\n        focusShowAxisY:    {get: function(){return focus.showYAxis();}, set: function(_){focus.showYAxis(_);}},\n        brushExtent: {get: function(){return focus.brushExtent();}, set: function(_){focus.brushExtent(_);}},\n\n        // options that require extra logic in the setter\n        focusMargin: {get: function(){return focus.margin}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            focus.margin.right  = _.right  !== undefined ? _.right  : focus.margin.right;\n            focus.margin.bottom = _.bottom !== undefined ? _.bottom : focus.margin.bottom;\n            focus.margin.left   = _.left   !== undefined ? _.left   : focus.margin.left;\n        }},\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            lines.duration(duration);\n            focus.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n            lines.color(color);\n            focus.color(color);\n        }},\n        interpolate: {get: function(){return lines.interpolate();}, set: function(_){\n            lines.interpolate(_);\n            focus.interpolate(_);\n        }},\n        xTickFormat: {get: function(){return xAxis.tickFormat();}, set: function(_){\n            xAxis.tickFormat(_);\n            focus.xTickFormat(_);\n        }},\n        yTickFormat: {get: function(){return yAxis.tickFormat();}, set: function(_){\n            yAxis.tickFormat(_);\n            focus.yTickFormat(_);\n        }},\n        x: {get: function(){return lines.x();}, set: function(_){\n            lines.x(_);\n            focus.x(_);\n        }},\n        y: {get: function(){return lines.y();}, set: function(_){\n            lines.y(_);\n            focus.y(_);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( rightAlignYAxis ? 
'right' : 'left');\n        }},\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = _;\n            if (useInteractiveGuideline) {\n                lines.interactive(false);\n                lines.useVoronoi(false);\n            }\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, lines);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n\nnv.models.lineWithFocusChart = function() {\n  return nv.models.lineChart()\n    .margin({ bottom: 30 })\n    .focusEnable( true );\n};\n","nv.models.linePlusBarChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var lines = nv.models.line()\n        , lines2 = nv.models.line()\n        , bars = nv.models.historicalBar()\n        , bars2 = nv.models.historicalBar()\n        , xAxis = nv.models.axis()\n        , x2Axis = nv.models.axis()\n        , y1Axis = nv.models.axis()\n        , y2Axis = nv.models.axis()\n        , y3Axis = nv.models.axis()\n        , y4Axis = nv.models.axis()\n        , legend = nv.models.legend()\n        , brush = d3.svg.brush()\n        , tooltip = nv.models.tooltip()\n        ;\n\n    var margin = {top: 30, right: 30, bottom: 30, left: 60}\n        , marginTop = null\n        , margin2 = {top: 0, right: 30, bottom: 20, left: 60}\n        , width = null\n        , height = null\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , color = nv.utils.defaultColor()\n        , showLegend = true\n        , focusEnable = true\n        , focusShowAxisY = false\n        , focusShowAxisX = true\n        , focusHeight = 50\n        , extent\n        , brushExtent = null\n        , x\n        , x2\n        , y1\n        , y2\n        , y3\n        , y4\n        , noData = null\n        , dispatch = d3.dispatch('brush', 'stateChange', 'changeState')\n        , transitionDuration = 0\n        , state = nv.utils.state()\n        , defaultState = null\n        , legendLeftAxisHint = ' (left axis)'\n        , legendRightAxisHint = ' (right axis)'\n        , switchYAxisOrder = false\n        ;\n\n    lines.clipEdge(true);\n    lines2.interactive(false);\n    // We don't want any points emitted for the focus chart's scatter graph.\n    lines2.pointActive(function(d) { return false });\n    xAxis.orient('bottom').tickPadding(5);\n    y1Axis.orient('left');\n    y2Axis.orient('right');\n    x2Axis.orient('bottom').tickPadding(5);\n    y3Axis.orient('left');\n    y4Axis.orient('right');\n\n    tooltip.headerEnabled(true).headerFormatter(function(d, i) {\n        return xAxis.tickFormat()(d, i);\n    });\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var getBarsAxis = function() {\n        return switchYAxisOrder\n            ? { main: y2Axis, focus: y4Axis }\n            : { main: y1Axis, focus: y3Axis }\n    }\n\n    var getLinesAxis = function() {\n        return switchYAxisOrder\n            ? 
{ main: y1Axis, focus: y3Axis }\n            : { main: y2Axis, focus: y4Axis }\n    }\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled })\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    var allDisabled = function(data) {\n      return data.every(function(series) {\n        return series.disabled;\n      });\n    }\n\n    function chart(selection) {\n        selection.each(function(data) {\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight1 = nv.utils.availableHeight(height, container, margin)\n                    - (focusEnable ? focusHeight : 0),\n                availableHeight2 = focusHeight - margin2.top - margin2.bottom;\n\n            chart.update = function() { container.transition().duration(transitionDuration).call(chart); };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disableddisabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            var dataBars = data.filter(function(d) { return !d.disabled && d.bar });\n            var dataLines = data.filter(function(d) { return !d.bar }); // removed the !d.disabled clause here to fix Issue #240\n\n            if (dataBars.length && !switchYAxisOrder) {\n                x = bars.xScale();\n            } else {\n                x = lines.xScale();\n            }\n\n            x2 = x2Axis.scale();\n\n            // select the scales and series based on the position of the yAxis\n            y1 = switchYAxisOrder ? lines.yScale() : bars.yScale();\n            y2 = switchYAxisOrder ? bars.yScale() : lines.yScale();\n            y3 = switchYAxisOrder ? lines2.yScale() : bars2.yScale();\n            y4 = switchYAxisOrder ? bars2.yScale() : lines2.yScale();\n\n            var series1 = data\n                .filter(function(d) { return !d.disabled && (switchYAxisOrder ? 
!d.bar : d.bar) })\n                .map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d,i), y: getY(d,i) }\n                    })\n                });\n\n            var series2 = data\n                .filter(function(d) { return !d.disabled && (switchYAxisOrder ? d.bar : !d.bar) })\n                .map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d,i), y: getY(d,i) }\n                    })\n                });\n\n            x.range([0, availableWidth]);\n\n            x2  .domain(d3.extent(d3.merge(series1.concat(series2)), function(d) { return d.x } ))\n                .range([0, availableWidth]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-linePlusBar').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-linePlusBar').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n\n            // this is the main chart\n            var focusEnter = gEnter.append('g').attr('class', 'nv-focus');\n            focusEnter.append('g').attr('class', 'nv-x nv-axis');\n            focusEnter.append('g').attr('class', 'nv-y1 nv-axis');\n            focusEnter.append('g').attr('class', 'nv-y2 nv-axis');\n            focusEnter.append('g').attr('class', 'nv-barsWrap');\n            focusEnter.append('g').attr('class', 'nv-linesWrap');\n\n            // context chart is where you can focus in\n            var contextEnter = gEnter.append('g').attr('class', 'nv-context');\n            contextEnter.append('g').attr('class', 'nv-x nv-axis');\n            contextEnter.append('g').attr('class', 'nv-y1 nv-axis');\n            contextEnter.append('g').attr('class', 'nv-y2 nv-axis');\n            contextEnter.append('g').attr('class', 'nv-barsWrap');\n            contextEnter.append('g').attr('class', 'nv-linesWrap');\n            contextEnter.append('g').attr('class', 'nv-brushBackground');\n            contextEnter.append('g').attr('class', 'nv-x nv-brush');\n\n            //============================================================\n            // Legend\n            //------------------------------------------------------------\n\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                var legendWidth = legend.align() ? availableWidth / 2 : availableWidth;\n                var legendXPosition = legend.align() ? legendWidth : 0;\n\n                legend.width(legendWidth);\n\n                g.select('.nv-legendWrap')\n                    .datum(data.map(function(series) {\n                        series.originalKey = series.originalKey === undefined ? series.key : series.originalKey;\n                        if(switchYAxisOrder) {\n                            series.key = series.originalKey + (series.bar ? legendRightAxisHint : legendLeftAxisHint);\n                        } else {\n                            series.key = series.originalKey + (series.bar ? legendLeftAxisHint : legendRightAxisHint);\n                        }\n                        return series;\n                    }))\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    // FIXME: shouldn't this be \"- (focusEnabled ? 
focusHeight : 0)\"?\n                    availableHeight1 = nv.utils.availableHeight(height, container, margin) - focusHeight;\n                }\n\n                g.select('.nv-legendWrap')\n                    .attr('transform', 'translate(' + legendXPosition + ',' + (-margin.top) +')');\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            //============================================================\n            // Context chart (focus chart) components\n            //------------------------------------------------------------\n\n            // hide or show the focus context chart\n            g.select('.nv-context').style('display', focusEnable ? 'initial' : 'none');\n\n            bars2\n                .width(availableWidth)\n                .height(availableHeight2)\n                .color(data.map(function (d, i) {\n                    return d.color || color(d, i);\n                }).filter(function (d, i) {\n                    return !data[i].disabled && data[i].bar\n                }));\n            lines2\n                .width(availableWidth)\n                .height(availableHeight2)\n                .color(data.map(function (d, i) {\n                    return d.color || color(d, i);\n                }).filter(function (d, i) {\n                    return !data[i].disabled && !data[i].bar\n                }));\n\n            var bars2Wrap = g.select('.nv-context .nv-barsWrap')\n                .datum(dataBars.length ? dataBars : [\n                    {values: []}\n                ]);\n            var lines2Wrap = g.select('.nv-context .nv-linesWrap')\n                .datum(allDisabled(dataLines) ?\n                       [{values: []}] :\n                       dataLines.filter(function(dataLine) {\n                         return !dataLine.disabled;\n                       }));\n\n            g.select('.nv-context')\n                .attr('transform', 'translate(0,' + ( availableHeight1 + margin.bottom + margin2.top) + ')');\n\n            bars2Wrap.transition().call(bars2);\n            lines2Wrap.transition().call(lines2);\n\n            // context (focus chart) axis controls\n            if (focusShowAxisX) {\n                x2Axis\n                    ._ticks( nv.utils.calcTicksX(availableWidth / 100, data))\n                    .tickSize(-availableHeight2, 0);\n                g.select('.nv-context .nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y3.range()[0] + ')');\n                g.select('.nv-context .nv-x.nv-axis').transition()\n                    .call(x2Axis);\n            }\n\n            if (focusShowAxisY) {\n                y3Axis\n                    .scale(y3)\n                    ._ticks( availableHeight2 / 36 )\n                    .tickSize( -availableWidth, 0);\n                y4Axis\n                    .scale(y4)\n                    ._ticks( availableHeight2 / 36 )\n                    .tickSize(dataBars.length ? 0 : -availableWidth, 0); // Show the y2 rules only if y1 has none\n\n                g.select('.nv-context .nv-y3.nv-axis')\n                    .style('opacity', dataBars.length ? 1 : 0)\n                    .attr('transform', 'translate(0,' + x2.range()[0] + ')');\n                g.select('.nv-context .nv-y2.nv-axis')\n                    .style('opacity', dataLines.length ? 
1 : 0)\n                    .attr('transform', 'translate(' + x2.range()[1] + ',0)');\n\n                g.select('.nv-context .nv-y1.nv-axis').transition()\n                    .call(y3Axis);\n                g.select('.nv-context .nv-y2.nv-axis').transition()\n                    .call(y4Axis);\n            }\n\n            // Setup Brush\n            brush.x(x2).on('brush', onBrush);\n\n            if (brushExtent) brush.extent(brushExtent);\n\n            var brushBG = g.select('.nv-brushBackground').selectAll('g')\n                .data([brushExtent || brush.extent()]);\n\n            var brushBGenter = brushBG.enter()\n                .append('g');\n\n            brushBGenter.append('rect')\n                .attr('class', 'left')\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('height', availableHeight2);\n\n            brushBGenter.append('rect')\n                .attr('class', 'right')\n                .attr('x', 0)\n                .attr('y', 0)\n                .attr('height', availableHeight2);\n\n            var gBrush = g.select('.nv-x.nv-brush')\n                .call(brush);\n            gBrush.selectAll('rect')\n                //.attr('y', -5)\n                .attr('height', availableHeight2);\n            gBrush.selectAll('.resize').append('path').attr('d', resizePath);\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n                    state.disabled = e.disabled;\n                }\n                chart.update();\n            });\n\n            //============================================================\n            // Functions\n            //------------------------------------------------------------\n\n            // Taken from crossfilter (http://square.github.com/crossfilter/)\n            function resizePath(d) {\n                var e = +(d == 'e'),\n                    x = e ? 1 : -1,\n                    y = availableHeight2 / 3;\n                return 'M' + (.5 * x) + ',' + y\n                    + 'A6,6 0 0 ' + e + ' ' + (6.5 * x) + ',' + (y + 6)\n                    + 'V' + (2 * y - 6)\n                    + 'A6,6 0 0 ' + e + ' ' + (.5 * x) + ',' + (2 * y)\n                    + 'Z'\n                    + 'M' + (2.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8)\n                    + 'M' + (4.5 * x) + ',' + (y + 8)\n                    + 'V' + (2 * y - 8);\n            }\n\n\n            function updateBrushBG() {\n                if (!brush.empty()) brush.extent(brushExtent);\n                brushBG\n                    .data([brush.empty() ? 
x2.domain() : brushExtent])\n                    .each(function(d,i) {\n                        var leftWidth = x2(d[0]) - x2.range()[0],\n                            rightWidth = x2.range()[1] - x2(d[1]);\n                        d3.select(this).select('.left')\n                            .attr('width',  leftWidth < 0 ? 0 : leftWidth);\n\n                        d3.select(this).select('.right')\n                            .attr('x', x2(d[1]))\n                            .attr('width', rightWidth < 0 ? 0 : rightWidth);\n                    });\n            }\n\n            function onBrush() {\n                brushExtent = brush.empty() ? null : brush.extent();\n                extent = brush.empty() ? x2.domain() : brush.extent();\n                dispatch.brush({extent: extent, brush: brush});\n                updateBrushBG();\n\n                // Prepare Main (Focus) Bars and Lines\n                bars\n                    .width(availableWidth)\n                    .height(availableHeight1)\n                    .color(data.map(function(d,i) {\n                        return d.color || color(d, i);\n                    }).filter(function(d,i) { return !data[i].disabled && data[i].bar }));\n\n                lines\n                    .width(availableWidth)\n                    .height(availableHeight1)\n                    .color(data.map(function(d,i) {\n                        return d.color || color(d, i);\n                    }).filter(function(d,i) { return !data[i].disabled && !data[i].bar }));\n\n                var focusBarsWrap = g.select('.nv-focus .nv-barsWrap')\n                    .datum(!dataBars.length ? [{values:[]}] :\n                        dataBars\n                            .map(function(d,i) {\n                                return {\n                                    key: d.key,\n                                    values: d.values.filter(function(d,i) {\n                                        return bars.x()(d,i) >= extent[0] && bars.x()(d,i) <= extent[1];\n                                    })\n                                }\n                            })\n                );\n\n                var focusLinesWrap = g.select('.nv-focus .nv-linesWrap')\n                    .datum(allDisabled(dataLines) ? 
[{values:[]}] :\n                           dataLines\n                           .filter(function(dataLine) { return !dataLine.disabled; })\n                           .map(function(d,i) {\n                                return {\n                                    area: d.area,\n                                    fillOpacity: d.fillOpacity,\n                                    strokeWidth: d.strokeWidth,\n                                    key: d.key,\n                                    values: d.values.filter(function(d,i) {\n                                        return lines.x()(d,i) >= extent[0] && lines.x()(d,i) <= extent[1];\n                                    })\n                                }\n                            })\n                );\n\n                // Update Main (Focus) X Axis\n                if (dataBars.length && !switchYAxisOrder) {\n                    x = bars.xScale();\n                } else {\n                    x = lines.xScale();\n                }\n\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight1, 0);\n\n                xAxis.domain([Math.ceil(extent[0]), Math.floor(extent[1])]);\n\n                g.select('.nv-x.nv-axis').transition().duration(transitionDuration)\n                    .call(xAxis);\n\n                // Update Main (Focus) Bars and Lines\n                focusBarsWrap.transition().duration(transitionDuration).call(bars);\n                focusLinesWrap.transition().duration(transitionDuration).call(lines);\n\n                // Setup and Update Main (Focus) Y Axes\n                g.select('.nv-focus .nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y1.range()[0] + ')');\n\n                y1Axis\n                    .scale(y1)\n                    ._ticks( nv.utils.calcTicksY(availableHeight1/36, data) )\n                    .tickSize(-availableWidth, 0);\n                y2Axis\n                    .scale(y2)\n                    ._ticks( nv.utils.calcTicksY(availableHeight1/36, data) );\n\n                // Show the y2 rules only if y1 has none\n                if(!switchYAxisOrder) {\n                    y2Axis.tickSize(dataBars.length ? 0 : -availableWidth, 0);\n                } else {\n                    y2Axis.tickSize(dataLines.length ? 0 : -availableWidth, 0);\n                }\n\n                // Calculate opacity of the axis\n                var barsOpacity = dataBars.length ? 1 : 0;\n                var linesOpacity = dataLines.length && !allDisabled(dataLines) ? 1 : 0;\n\n                var y1Opacity = switchYAxisOrder ? linesOpacity : barsOpacity;\n                var y2Opacity = switchYAxisOrder ? 
barsOpacity : linesOpacity;\n\n                g.select('.nv-focus .nv-y1.nv-axis')\n                    .style('opacity', y1Opacity);\n                g.select('.nv-focus .nv-y2.nv-axis')\n                    .style('opacity', y2Opacity)\n                    .attr('transform', 'translate(' + x.range()[1] + ',0)');\n\n                g.select('.nv-focus .nv-y1.nv-axis').transition().duration(transitionDuration)\n                    .call(y1Axis);\n                g.select('.nv-focus .nv-y2.nv-axis').transition().duration(transitionDuration)\n                    .call(y2Axis);\n            }\n\n            onBrush();\n\n        });\n\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    lines.dispatch.on('elementMouseover.tooltip', function(evt) {\n        tooltip\n            .duration(100)\n            .valueFormatter(function(d, i) {\n                return getLinesAxis().main.tickFormat()(d, i);\n            })\n            .data(evt)\n            .hidden(false);\n    });\n\n    lines.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true)\n    });\n\n    bars.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt.value = chart.x()(evt.data);\n        evt['series'] = {\n            value: chart.y()(evt.data),\n            color: evt.color\n        };\n        tooltip\n            .duration(0)\n            .valueFormatter(function(d, i) {\n                return getBarsAxis().main.tickFormat()(d, i);\n            })\n            .data(evt)\n            .hidden(false);\n    });\n\n    bars.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    bars.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.legend = legend;\n    chart.lines = lines;\n    chart.lines2 = lines2;\n    chart.bars = bars;\n    chart.bars2 = bars2;\n    chart.xAxis = xAxis;\n    chart.x2Axis = x2Axis;\n    chart.y1Axis = y1Axis;\n    chart.y2Axis = y2Axis;\n    chart.y3Axis = y3Axis;\n    chart.y4Axis = y4Axis;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        brushExtent:    {get: function(){return brushExtent;}, set: function(_){brushExtent=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        focusEnable:    {get: function(){return focusEnable;}, set: function(_){focusEnable=_;}},\n        focusHeight:    {get: function(){return focusHeight;}, set: function(_){focusHeight=_;}},\n        focusShowAxisX:    {get: function(){return focusShowAxisX;}, set: function(_){focusShowAxisX=_;}},\n        focusShowAxisY:    {get: function(){return focusShowAxisY;}, 
set: function(_){focusShowAxisY=_;}},\n        legendLeftAxisHint:    {get: function(){return legendLeftAxisHint;}, set: function(_){legendLeftAxisHint=_;}},\n        legendRightAxisHint:    {get: function(){return legendRightAxisHint;}, set: function(_){legendRightAxisHint=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        focusMargin: {get: function(){return margin2;}, set: function(_){\n            margin2.top    = _.top    !== undefined ? _.top    : margin2.top;\n            margin2.right  = _.right  !== undefined ? _.right  : margin2.right;\n            margin2.bottom = _.bottom !== undefined ? _.bottom : margin2.bottom;\n            margin2.left   = _.left   !== undefined ? _.left   : margin2.left;\n        }},\n        duration: {get: function(){return transitionDuration;}, set: function(_){\n            transitionDuration = _;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n        }},\n        x: {get: function(){return getX;}, set: function(_){\n            getX = _;\n            lines.x(_);\n            lines2.x(_);\n            bars.x(_);\n            bars2.x(_);\n        }},\n        y: {get: function(){return getY;}, set: function(_){\n            getY = _;\n            lines.y(_);\n            lines2.y(_);\n            bars.y(_);\n            bars2.y(_);\n        }},\n        switchYAxisOrder:    {get: function(){return switchYAxisOrder;}, set: function(_){\n            // Switch the tick format for the yAxis\n            if(switchYAxisOrder !== _) {\n                var y1 = y1Axis;\n                y1Axis = y2Axis;\n                y2Axis = y1;\n\n                var y3 = y3Axis;\n                y3Axis = y4Axis;\n                y4Axis = y3;\n            }\n            switchYAxisOrder=_;\n\n            y1Axis.orient('left');\n            y2Axis.orient('right');\n            y3Axis.orient('left');\n            y4Axis.orient('right');\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, lines);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.multiBar = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 960\n        , height = 500\n        , x = d3.scale.ordinal()\n        , y = d3.scale.linear()\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , forceY = [0] // 0 is forced by default.. this makes sense for the majority of bar graphs... 
user can always do chart.forceY([]) to remove\n        , clipEdge = true\n        , stacked = false\n        , stackOffset = 'zero' // options include 'silhouette', 'wiggle', 'expand', 'zero', or a custom function\n        , color = nv.utils.defaultColor()\n        , hideable = false\n        , barColor = null // adding the ability to set the color for each rather than the whole group\n        , disabled // used in conjunction with barColor to communicate from multiBarHorizontalChart what series are disabled\n        , duration = 500\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , groupSpacing = 0.1\n        , fillOpacity = 0.75\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0 //used to store previous scales\n        , renderWatch = nv.utils.renderWatch(dispatch, duration)\n        ;\n\n    var last_datalength = 0;\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n            var nonStackableCount = 0;\n            // This function defines the requirements for render complete\n            var endFn = function(d, i) {\n                if (d.series === data.length - 1 && i === data[0].values.length - 1)\n                    return true;\n                return false;\n            };\n\n            if(hideable && data.length) hideable = [{\n                values: data[0].values.map(function(d) {\n                        return {\n                            x: d.x,\n                            y: 0,\n                            series: d.series,\n                            size: 0.01\n                        };}\n                )}];\n\n            if (stacked) {\n                var parsed = d3.layout.stack()\n                    .offset(stackOffset)\n                    .values(function(d){ return d.values })\n                    .y(getY)\n                (!data.length && hideable ? 
hideable : data);\n\n                parsed.forEach(function(series, i){\n                    // if series is non-stackable, use un-parsed data\n                    if (series.nonStackable) {\n                        data[i].nonStackableSeries = nonStackableCount++;\n                        parsed[i] = data[i];\n                    } else {\n                        // don't stack this seires on top of the nonStackable seriees\n                        if (i > 0 && parsed[i - 1].nonStackable){\n                            parsed[i].values.map(function(d,j){\n                                d.y0 -= parsed[i - 1].values[j].y;\n                                d.y1 = d.y0 + d.y;\n                            });\n                        }\n                    }\n                });\n                data = parsed;\n            }\n            //add series index and key to each data point for reference\n            data.forEach(function(series, i) {\n                series.values.forEach(function(point) {\n                    point.series = i;\n                    point.key = series.key;\n                });\n            });\n\n            // HACK for negative value stacking\n            if (stacked && data.length > 0) {\n                data[0].values.map(function(d,i) {\n                    var posBase = 0, negBase = 0;\n                    data.map(function(d, idx) {\n                        if (!data[idx].nonStackable) {\n                            var f = d.values[i]\n                            f.size = Math.abs(f.y);\n                            if (f.y<0)  {\n                                f.y1 = negBase;\n                                negBase = negBase - f.size;\n                            } else\n                            {\n                                f.y1 = f.size + posBase;\n                                posBase = posBase + f.size;\n                            }\n                        }\n\n                    });\n                });\n            }\n            // Setup Scales\n            // remap and flatten the data for use in calculating the scales' domains\n            var seriesData = (xDomain && yDomain) ? [] : // if we know xDomain and yDomain, no need to calculate\n                data.map(function(d, idx) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d,i), y: getY(d,i), y0: d.y0, y1: d.y1, idx:idx }\n                    })\n                });\n\n            x.domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))\n                .rangeBands(xRange || [0, availableWidth], groupSpacing);\n\n            y.domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) {\n                var domain = d.y;\n                // increase the domain range if this series is stackable\n                if (stacked && !data[d.idx].nonStackable) {\n                    if (d.y > 0){\n                        domain = d.y1\n                    } else {\n                        domain = d.y1 + d.y\n                    }\n                }\n                return domain;\n            }).concat(forceY)))\n            .range(yRange || [availableHeight, 0]);\n\n            // If scale's domain don't have a range, slightly adjust to make one... 
so a chart can show a single data point\n            if (x.domain()[0] === x.domain()[1])\n                x.domain()[0] ?\n                    x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])\n                    : x.domain([-1,1]);\n\n            if (y.domain()[0] === y.domain()[1])\n                y.domain()[0] ?\n                    y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])\n                    : y.domain([-1,1]);\n\n            x0 = x0 || x;\n            y0 = y0 || y;\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-multibar').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multibar');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-groups');\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-edge-clip-' + id)\n                .append('rect');\n            wrap.select('#nv-edge-clip-' + id + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            g.attr('clip-path', clipEdge ? 'url(#nv-edge-clip-' + id + ')' : '');\n\n            var groups = wrap.select('.nv-groups').selectAll('.nv-group')\n                .data(function(d) { return d }, function(d,i) { return i });\n            groups.enter().append('g')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6);\n\n            var exitTransition = renderWatch\n                .transition(groups.exit().selectAll('rect.nv-bar'), 'multibarExit', Math.min(100, duration))\n                .attr('y', function(d, i, j) {\n                    var yVal = y0(0) || 0;\n                    if (stacked) {\n                        if (data[d.series] && !data[d.series].nonStackable) {\n                            yVal = y0(d.y0);\n                        }\n                    }\n                    return yVal;\n                })\n                .attr('height', 0)\n                .remove();\n            if (exitTransition.delay)\n                exitTransition.delay(function(d,i) {\n                    var delay = i * (duration / (last_datalength + 1)) - i;\n                    return delay;\n                });\n            groups\n                .attr('class', function(d,i) { return 'nv-group nv-series-' + i })\n                .classed('hover', function(d) { return d.hover })\n                .style('fill', function(d,i){ return color(d, i) })\n                .style('stroke', function(d,i){ return color(d, i) });\n            groups\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', fillOpacity);\n\n            var bars = groups.selectAll('rect.nv-bar')\n                .data(function(d) { return (hideable && !data.length) ? hideable.values : d.values });\n            bars.exit().remove();\n\n            var barsEnter = bars.enter().append('rect')\n                    .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive'})\n                    .attr('x', function(d,i,j) {\n                        return stacked && !data[j].nonStackable ? 
0 : (j * x.rangeBand() / data.length )\n                    })\n                    .attr('y', function(d,i,j) { return y0(stacked && !data[j].nonStackable ? d.y0 : 0) || 0 })\n                    .attr('height', 0)\n                    .attr('width', function(d,i,j) { return x.rangeBand() / (stacked && !data[j].nonStackable ? 1 : data.length) })\n                    .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',0)'; })\n                ;\n            bars\n                .style('fill', function(d,i,j){ return color(d, j, i);  })\n                .style('stroke', function(d,i,j){ return color(d, j, i); })\n                .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mousemove', function(d,i) {\n                    dispatch.elementMousemove({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('click', function(d,i) {\n                    var element = this;\n                    dispatch.elementClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\"),\n                        event: d3.event,\n                        element: element\n                    });\n                    d3.event.stopPropagation();\n                })\n                .on('dblclick', function(d,i) {\n                    dispatch.elementDblClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                    d3.event.stopPropagation();\n                });\n            bars\n                .attr('class', function(d,i) { return getY(d,i) < 0 ? 
'nv-bar negative' : 'nv-bar positive'})\n                .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',0)'; })\n\n            if (barColor) {\n                if (!disabled) disabled = data.map(function() { return true });\n                bars\n                    .style('fill', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); })\n                    .style('stroke', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); });\n            }\n\n            var barSelection =\n                bars.watchTransition(renderWatch, 'multibar', Math.min(250, duration))\n                    .delay(function(d,i) {\n                        return i * duration / data[0].values.length;\n                    });\n            if (stacked){\n                barSelection\n                    .attr('y', function(d,i,j) {\n                        var yVal = 0;\n                        // if stackable, stack it on top of the previous series\n                        if (!data[j].nonStackable) {\n                            yVal = y(d.y1);\n                        } else {\n                            if (getY(d,i) < 0){\n                                yVal = y(0);\n                            } else {\n                                if (y(0) - y(getY(d,i)) < -1){\n                                    yVal = y(0) - 1;\n                                } else {\n                                    yVal = y(getY(d, i)) || 0;\n                                }\n                            }\n                        }\n                        return yVal;\n                    })\n                    .attr('height', function(d,i,j) {\n                        if (!data[j].nonStackable) {\n                            return Math.max(Math.abs(y(d.y+d.y0) - y(d.y0)), 0);\n                        } else {\n                            return Math.max(Math.abs(y(getY(d,i)) - y(0)), 0) || 0;\n                        }\n                    })\n                    .attr('x', function(d,i,j) {\n                        var width = 0;\n                        if (data[j].nonStackable) {\n                            width = d.series * x.rangeBand() / data.length;\n                            if (data.length !== nonStackableCount){\n                                width = data[j].nonStackableSeries * x.rangeBand()/(nonStackableCount*2);\n                            }\n                        }\n                        return width;\n                    })\n                    .attr('width', function(d,i,j){\n                        if (!data[j].nonStackable) {\n                            return x.rangeBand();\n                        } else {\n                            // if all series are nonStacable, take the full width\n                            var width = (x.rangeBand() / nonStackableCount);\n                            // otherwise, nonStackable graph will be only taking the half-width\n                            // of the x rangeBand\n                            if (data.length !== nonStackableCount) {\n                                width = x.rangeBand()/(nonStackableCount*2);\n                            }\n                            return width;\n                        }\n                    });\n            }\n            else {\n                barSelection\n          
          .attr('x', function(d,i) {\n                        return d.series * x.rangeBand() / data.length;\n                    })\n                    .attr('width', x.rangeBand() / data.length)\n                    .attr('y', function(d,i) {\n                        return getY(d,i) < 0 ?\n                            y(0) :\n                                y(0) - y(getY(d,i)) < 1 ?\n                            y(0) - 1 :\n                            y(getY(d,i)) || 0;\n                    })\n                    .attr('height', function(d,i) {\n                        return Math.max(Math.abs(y(getY(d,i)) - y(0)),1) || 0;\n                    });\n            }\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n\n            // keep track of the last data value length for transition calculations\n            if (data[0] && data[0].values) {\n                last_datalength = data[0].values.length;\n            }\n\n        });\n\n        renderWatch.renderEnd('multibar immediate');\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:   {get: function(){return width;}, set: function(_){width=_;}},\n        height:  {get: function(){return height;}, set: function(_){height=_;}},\n        x:       {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:       {get: function(){return getY;}, set: function(_){getY=_;}},\n        xScale:  {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:  {get: function(){return y;}, set: function(_){y=_;}},\n        xDomain: {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain: {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:  {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:  {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        forceY:  {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        stacked: {get: function(){return stacked;}, set: function(_){stacked=_;}},\n        stackOffset: {get: function(){return stackOffset;}, set: function(_){stackOffset=_;}},\n        clipEdge:    {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        disabled:    {get: function(){return disabled;}, set: function(_){disabled=_;}},\n        id:          {get: function(){return id;}, set: function(_){id=_;}},\n        hideable:    {get: function(){return hideable;}, set: function(_){hideable=_;}},\n        groupSpacing:{get: function(){return groupSpacing;}, set: function(_){groupSpacing=_;}},\n        fillOpacity: {get: function(){return fillOpacity;}, set: function(_){fillOpacity=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        barColor:  {get: function(){return barColor;}, set: function(_){\n            barColor = _ ? nv.utils.getColor(_) : null;\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","nv.models.multiBarChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var multibar = nv.models.multiBar()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , interactiveLayer = nv.interactiveGuideline()\n        , legend = nv.models.legend()\n        , controls = nv.models.legend()\n        , tooltip = nv.models.tooltip()\n        ;\n\n    var margin = {top: 30, right: 20, bottom: 50, left: 60}\n        , marginTop = null\n        , width = null\n        , height = null\n        , color = nv.utils.defaultColor()\n        , showControls = true\n        , controlLabels = {}\n        , showLegend = true\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , reduceXTicks = true // if false a tick will show for every data point\n        , staggerLabels = false\n        , wrapLabels = false\n        , rotateLabels = 0\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , dispatch = d3.dispatch('stateChange', 'changeState', 'renderEnd')\n        , controlWidth = function() { return showControls ? 180 : 0 }\n        , duration = 250\n        , useInteractiveGuideline = false\n        ;\n\n    state.stacked = false // DEPRECATED Maintained for backward compatibility\n\n    multibar.stacked(false);\n    xAxis\n        .orient('bottom')\n        .tickPadding(7)\n        .showMaxMin(false)\n        .tickFormat(function(d) { return d })\n    ;\n    yAxis\n        .orient((rightAlignYAxis) ? 'right' : 'left')\n        .tickFormat(d3.format(',.1f'))\n    ;\n\n    tooltip\n        .duration(0)\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    interactiveLayer.tooltip\n        .valueFormatter(function(d, i) {\n            return d == null ? \"N/A\" : yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    interactiveLayer.tooltip\n        .valueFormatter(function (d, i) {\n            return d == null ? 
\"N/A\" : yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function (d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    interactiveLayer.tooltip\n        .duration(0)\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    controls.updateState(false);\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n    var stacked = false;\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled }),\n                stacked: stacked\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.stacked !== undefined)\n                stacked = state.stacked;\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(multibar);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() {\n                if (duration === 0)\n                    container.call(chart);\n                else\n                    container.transition()\n                        .duration(duration)\n                        .call(chart);\n            };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disableddisabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display noData message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = multibar.xScale();\n            y = multibar.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-multiBarWithLegend').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multiBarWithLegend').append('g');\n            var g = 
wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis');\n            gEnter.append('g').attr('class', 'nv-barsWrap');\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n            gEnter.append('g').attr('class', 'nv-controlsWrap');\n            gEnter.append('g').attr('class', 'nv-interactive');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth - controlWidth());\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                g.select('.nv-legendWrap')\n                    .attr('transform', 'translate(' + controlWidth() + ',' + (-margin.top) +')');\n            }\n\n            // Controls\n            if (!showControls) {\n                 g.select('.nv-controlsWrap').selectAll('*').remove();\n            } else {\n                var controlsData = [\n                    { key: controlLabels.grouped || 'Grouped', disabled: multibar.stacked() },\n                    { key: controlLabels.stacked || 'Stacked', disabled: !multibar.stacked() }\n                ];\n\n                controls.width(controlWidth()).color(['#444', '#444', '#444']);\n                g.select('.nv-controlsWrap')\n                    .datum(controlsData)\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n                    .call(controls);\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            // Main Chart Component(s)\n            multibar\n                .disabled(data.map(function(series) { return series.disabled }))\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled }));\n\n\n            var barsWrap = g.select('.nv-barsWrap')\n                .datum(data.filter(function(d) { return !d.disabled }));\n\n            barsWrap.call(multibar);\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize(-availableHeight, 0);\n\n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')');\n                g.select('.nv-x.nv-axis')\n                    .call(xAxis);\n\n                var xTicks = g.select('.nv-x.nv-axis > g').selectAll('g');\n\n                xTicks\n                    .selectAll('line, text')\n                    .style('opacity', 1)\n\n                if (staggerLabels) {\n                    var getTranslate = function(x,y) {\n                        return \"translate(\" + x + \",\" + y + \")\";\n                    };\n\n                    var staggerUp = 5, 
staggerDown = 17;  //pixels to stagger by\n                    // Issue #140\n                    xTicks\n                        .selectAll(\"text\")\n                        .attr('transform', function(d,i,j) {\n                            return  getTranslate(0, (j % 2 == 0 ? staggerUp : staggerDown));\n                        });\n\n                    var totalInBetweenTicks = d3.selectAll(\".nv-x.nv-axis .nv-wrap g g text\")[0].length;\n                    g.selectAll(\".nv-x.nv-axis .nv-axisMaxMin text\")\n                        .attr(\"transform\", function(d,i) {\n                            return getTranslate(0, (i === 0 || totalInBetweenTicks % 2 !== 0) ? staggerDown : staggerUp);\n                        });\n                }\n\n                if (wrapLabels) {\n                    g.selectAll('.tick text')\n                        .call(nv.utils.wrapTicks, chart.xAxis.rangeBand())\n                }\n\n                if (reduceXTicks)\n                    xTicks\n                        .filter(function(d,i) {\n                            return i % Math.ceil(data[0].values.length / (availableWidth / 100)) !== 0;\n                        })\n                        .selectAll('text, line')\n                        .style('opacity', 0);\n\n                if(rotateLabels)\n                    xTicks\n                        .selectAll('.tick text')\n                        .attr('transform', 'rotate(' + rotateLabels + ' 0,0)')\n                        .style('text-anchor', rotateLabels > 0 ? 'start' : 'end');\n\n                g.select('.nv-x.nv-axis').selectAll('g.nv-axisMaxMin text')\n                    .style('opacity', 1);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis')\n                    .call(yAxis);\n            }\n\n            //Set up interactive layer\n            if (useInteractiveGuideline) {\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left:margin.left, top:margin.top})\n                    .svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            controls.dispatch.on('legendClick', function(d,i) {\n                if (!d.disabled) return;\n                controlsData = controlsData.map(function(s) {\n                    s.disabled = true;\n                    return s;\n                });\n                d.disabled = false;\n\n                switch (d.key) {\n                    case 'Grouped':\n                    case controlLabels.grouped:\n                        multibar.stacked(false);\n                        break;\n                    case 'Stacked':\n                    case controlLabels.stacked:\n                        
multibar.stacked(true);\n                        break;\n                }\n\n                state.stacked = multibar.stacked();\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n                    state.disabled = e.disabled;\n                }\n                if (typeof e.stacked !== 'undefined') {\n                    multibar.stacked(e.stacked);\n                    state.stacked = e.stacked;\n                    stacked = e.stacked;\n                }\n                chart.update();\n            });\n\n            if (useInteractiveGuideline) {\n                interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                    if (e.pointXValue == undefined) return;\n\n                    var singlePoint, pointIndex, pointXLocation, xValue, allData = [];\n                    data\n                        .filter(function(series, i) {\n                            series.seriesIndex = i;\n                            return !series.disabled;\n                        })\n                        .forEach(function(series,i) {\n                            pointIndex = x.domain().indexOf(e.pointXValue)\n\n                            var point = series.values[pointIndex];\n                            if (point === undefined) return;\n\n                            xValue = point.x;\n                            if (singlePoint === undefined) singlePoint = point;\n                            if (pointXLocation === undefined) pointXLocation = e.mouseX\n                            allData.push({\n                                key: series.key,\n                                value: chart.y()(point, pointIndex),\n                                color: color(series,series.seriesIndex),\n                                data: series.values[pointIndex]\n                            });\n                        });\n\n                    interactiveLayer.tooltip\n                        .data({\n                            value: xValue,\n                            index: pointIndex,\n                            series: allData\n                        })();\n\n                    interactiveLayer.renderGuideLine(pointXLocation);\n                });\n\n                interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                    interactiveLayer.tooltip.hidden(true);\n                });\n            }\n            else {\n                multibar.dispatch.on('elementMouseover.tooltip', function(evt) {\n                    evt.value = chart.x()(evt.data);\n                    evt['series'] = {\n                        key: evt.data.key,\n                        value: chart.y()(evt.data),\n                        color: evt.color\n                    };\n                    tooltip.data(evt).hidden(false);\n                });\n\n                multibar.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true);\n                });\n\n                multibar.dispatch.on('elementMousemove.tooltip', function(evt) {\n                    tooltip();\n                });\n            }\n        });\n\n        renderWatch.renderEnd('multibarchart immediate');\n 
       return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.multibar = multibar;\n    chart.legend = legend;\n    chart.controls = controls;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.state = state;\n    chart.tooltip = tooltip;\n    chart.interactiveLayer = interactiveLayer;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        showControls: {get: function(){return showControls;}, set: function(_){showControls=_;}},\n        controlLabels: {get: function(){return controlLabels;}, set: function(_){controlLabels=_;}},\n        showXAxis:      {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        reduceXTicks:    {get: function(){return reduceXTicks;}, set: function(_){reduceXTicks=_;}},\n        rotateLabels:    {get: function(){return rotateLabels;}, set: function(_){rotateLabels=_;}},\n        staggerLabels:    {get: function(){return staggerLabels;}, set: function(_){staggerLabels=_;}},\n        wrapLabels:   {get: function(){return wrapLabels;}, set: function(_){wrapLabels=!!_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            multibar.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n            renderWatch.reset(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( rightAlignYAxis ? 
'right' : 'left');\n        }},\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = _;\n        }},\n        barColor:  {get: function(){return multibar.barColor;}, set: function(_){\n            multibar.barColor(_);\n            legend.color(function(d,i) {return d3.rgb('#ccc').darker(i * 1.5).toString();})\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, multibar);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.multiBarHorizontal = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 960\n        , height = 500\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , x = d3.scale.ordinal()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , getYerr = function(d) { return d.yErr }\n        , forceY = [0] // 0 is forced by default.. this makes sense for the majority of bar graphs... user can always do chart.forceY([]) to remove\n        , color = nv.utils.defaultColor()\n        , barColor = null // adding the ability to set the color for each rather than the whole group\n        , disabled // used in conjunction with barColor to communicate from multiBarHorizontalChart what series are disabled\n        , stacked = false\n        , showValues = false\n        , showBarLabels = false\n        , valuePadding = 60\n        , groupSpacing = 0.1\n        , fillOpacity = 0.75\n        , valueFormat = d3.format(',.2f')\n        , delay = 1200\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , duration = 250\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0; //used to store previous scales\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            if (stacked)\n                data = d3.layout.stack()\n                    .offset('zero')\n                    .values(function(d){ return d.values })\n                    .y(getY)\n                (data);\n\n            //add series index and key to each data point for reference\n            data.forEach(function(series, i) {\n                series.values.forEach(function(point) {\n                    point.series = i;\n                    point.key = series.key;\n                });\n            });\n\n            // HACK for negative value stacking\n            if (stacked)\n                data[0].values.map(function(d,i) {\n                    var posBase = 0, negBase = 0;\n                    data.map(function(d) {\n                        
var f = d.values[i]\n                        f.size = Math.abs(f.y);\n                        if (f.y<0)  {\n                            f.y1 = negBase - f.size;\n                            negBase = negBase - f.size;\n                        } else\n                        {\n                            f.y1 = posBase;\n                            posBase = posBase + f.size;\n                        }\n                    });\n                });\n\n            // Setup Scales\n            // remap and flatten the data for use in calculating the scales' domains\n            var seriesData = (xDomain && yDomain) ? [] : // if we know xDomain and yDomain, no need to calculate\n                data.map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d,i), y: getY(d,i), y0: d.y0, y1: d.y1 }\n                    })\n                });\n\n            x.domain(xDomain || d3.merge(seriesData).map(function(d) { return d.x }))\n                .rangeBands(xRange || [0, availableHeight], groupSpacing);\n\n            y.domain(yDomain || d3.extent(d3.merge(seriesData).map(function(d) { return stacked ? (d.y > 0 ? d.y1 + d.y : d.y1 ) : d.y }).concat(forceY)))\n\n            if (showValues && !stacked)\n                y.range(yRange || [(y.domain()[0] < 0 ? valuePadding : 0), availableWidth - (y.domain()[1] > 0 ? valuePadding : 0) ]);\n            else\n                y.range(yRange || [0, availableWidth]);\n\n            x0 = x0 || x;\n            y0 = y0 || d3.scale.linear().domain(y.domain()).range([y(0),y(0)]);\n\n            // Setup containers and skeleton of chart\n            var wrap = d3.select(this).selectAll('g.nv-wrap.nv-multibarHorizontal').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multibarHorizontal');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-groups');\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var groups = wrap.select('.nv-groups').selectAll('.nv-group')\n                .data(function(d) { return d }, function(d,i) { return i });\n            groups.enter().append('g')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6);\n            groups.exit().watchTransition(renderWatch, 'multibarhorizontal: exit groups')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6)\n                .remove();\n            groups\n                .attr('class', function(d,i) { return 'nv-group nv-series-' + i })\n                .classed('hover', function(d) { return d.hover })\n                .style('fill', function(d,i){ return color(d, i) })\n                .style('stroke', function(d,i){ return color(d, i) });\n            groups.watchTransition(renderWatch, 'multibarhorizontal: groups')\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', fillOpacity);\n\n            var bars = groups.selectAll('g.nv-bar')\n                .data(function(d) { return d.values });\n            bars.exit().remove();\n\n            var barsEnter = bars.enter().append('g')\n                .attr('transform', function(d,i,j) {\n                    return 'translate(' + y0(stacked ? d.y0 : 0) + ',' + (stacked ? 
0 : (j * x.rangeBand() / data.length ) + x(getX(d,i))) + ')'\n                });\n\n            barsEnter.append('rect')\n                .attr('width', 0)\n                .attr('height', x.rangeBand() / (stacked ? 1 : data.length) )\n\n            bars\n                .on('mouseover', function(d,i) { //TODO: figure out why j works above, but not here\n                    d3.select(this).classed('hover', true);\n                    dispatch.elementMouseover({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.elementMouseout({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    dispatch.elementMouseout({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('mousemove', function(d,i) {\n                    dispatch.elementMousemove({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                })\n                .on('click', function(d,i) {\n                    var element = this;\n                    dispatch.elementClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\"),\n                        event: d3.event,\n                        element: element\n                    });\n                    d3.event.stopPropagation();\n                })\n                .on('dblclick', function(d,i) {\n                    dispatch.elementDblClick({\n                        data: d,\n                        index: i,\n                        color: d3.select(this).style(\"fill\")\n                    });\n                    d3.event.stopPropagation();\n                });\n\n            if (getYerr(data[0],0)) {\n                barsEnter.append('polyline');\n\n                bars.select('polyline')\n                    .attr('fill', 'none')\n                    .attr('points', function(d,i) {\n                        var xerr = getYerr(d,i)\n                            , mid = 0.8 * x.rangeBand() / ((stacked ? 1 : data.length) * 2);\n                        xerr = xerr.length ? xerr : [-Math.abs(xerr), Math.abs(xerr)];\n                        xerr = xerr.map(function(e) { return y(e) - y(0); });\n                        var a = [[xerr[0],-mid], [xerr[0],mid], [xerr[0],0], [xerr[1],0], [xerr[1],-mid], [xerr[1],mid]];\n                        return a.map(function (path) { return path.join(',') }).join(' ');\n                    })\n                    .attr('transform', function(d,i) {\n                        var mid = x.rangeBand() / ((stacked ? 1 : data.length) * 2);\n                        return 'translate(' + (getY(d,i) < 0 ? 
0 : y(getY(d,i)) - y(0)) + ', ' + mid + ')'\n                    });\n            }\n\n            barsEnter.append('text');\n\n            if (showValues && !stacked) {\n                bars.select('text')\n                    .attr('text-anchor', function(d,i) { return getY(d,i) < 0 ? 'end' : 'start' })\n                    .attr('y', x.rangeBand() / (data.length * 2))\n                    .attr('dy', '.32em')\n                    .text(function(d,i) {\n                        var t = valueFormat(getY(d,i))\n                            , yerr = getYerr(d,i);\n                        if (yerr === undefined)\n                            return t;\n                        if (!yerr.length)\n                            return t + '±' + valueFormat(Math.abs(yerr));\n                        return t + '+' + valueFormat(Math.abs(yerr[1])) + '-' + valueFormat(Math.abs(yerr[0]));\n                    });\n                bars.watchTransition(renderWatch, 'multibarhorizontal: bars')\n                    .select('text')\n                    .attr('x', function(d,i) { return getY(d,i) < 0 ? -4 : y(getY(d,i)) - y(0) + 4 })\n            } else {\n                bars.selectAll('text').text('');\n            }\n\n            if (showBarLabels && !stacked) {\n                barsEnter.append('text').classed('nv-bar-label',true);\n                bars.select('text.nv-bar-label')\n                    .attr('text-anchor', function(d,i) { return getY(d,i) < 0 ? 'start' : 'end' })\n                    .attr('y', x.rangeBand() / (data.length * 2))\n                    .attr('dy', '.32em')\n                    .text(function(d,i) { return getX(d,i) });\n                bars.watchTransition(renderWatch, 'multibarhorizontal: bars')\n                    .select('text.nv-bar-label')\n                    .attr('x', function(d,i) { return getY(d,i) < 0 ? y(0) - y(getY(d,i)) + 4 : -4 });\n            }\n            else {\n                bars.selectAll('text.nv-bar-label').text('');\n            }\n\n            bars\n                .attr('class', function(d,i) { return getY(d,i) < 0 ? 'nv-bar negative' : 'nv-bar positive'})\n\n            if (barColor) {\n                if (!disabled) disabled = data.map(function() { return true });\n                bars\n                    .style('fill', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); })\n                    .style('stroke', function(d,i,j) { return d3.rgb(barColor(d,i)).darker(  disabled.map(function(d,i) { return i }).filter(function(d,i){ return !disabled[i]  })[j]   ).toString(); });\n            }\n\n            if (stacked)\n                bars.watchTransition(renderWatch, 'multibarhorizontal: bars')\n                    .attr('transform', function(d,i) {\n                        return 'translate(' + y(d.y1) + ',' + x(getX(d,i)) + ')'\n                    })\n                    .select('rect')\n                    .attr('width', function(d,i) {\n                        return Math.abs(y(getY(d,i) + d.y0) - y(d.y0)) || 0\n                    })\n                    .attr('height', x.rangeBand() );\n            else\n                bars.watchTransition(renderWatch, 'multibarhorizontal: bars')\n                    .attr('transform', function(d,i) {\n                        //TODO: stacked must be all positive or all negative, not both?\n                        return 'translate(' +\n                            (getY(d,i) < 0 ? 
y(getY(d,i)) : y(0))\n                            + ',' +\n                            (d.series * x.rangeBand() / data.length\n                                +\n                                x(getX(d,i)) )\n                            + ')'\n                    })\n                    .select('rect')\n                    .attr('height', x.rangeBand() / data.length )\n                    .attr('width', function(d,i) {\n                        return Math.max(Math.abs(y(getY(d,i)) - y(0)),1) || 0\n                    });\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n\n        });\n\n        renderWatch.renderEnd('multibarHorizontal immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:   {get: function(){return width;}, set: function(_){width=_;}},\n        height:  {get: function(){return height;}, set: function(_){height=_;}},\n        x:       {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:       {get: function(){return getY;}, set: function(_){getY=_;}},\n        yErr:       {get: function(){return getYerr;}, set: function(_){getYerr=_;}},\n        xScale:  {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:  {get: function(){return y;}, set: function(_){y=_;}},\n        xDomain: {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain: {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:  {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:  {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        forceY:  {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        stacked: {get: function(){return stacked;}, set: function(_){stacked=_;}},\n        showValues: {get: function(){return showValues;}, set: function(_){showValues=_;}},\n        // this shows the group name, seems pointless?\n        //showBarLabels:    {get: function(){return showBarLabels;}, set: function(_){showBarLabels=_;}},\n        disabled:     {get: function(){return disabled;}, set: function(_){disabled=_;}},\n        id:           {get: function(){return id;}, set: function(_){id=_;}},\n        valueFormat:  {get: function(){return valueFormat;}, set: function(_){valueFormat=_;}},\n        valuePadding: {get: function(){return valuePadding;}, set: function(_){valuePadding=_;}},\n        groupSpacing: {get: function(){return groupSpacing;}, set: function(_){groupSpacing=_;}},\n        fillOpacity:  {get: function(){return fillOpacity;}, set: function(_){fillOpacity=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        barColor:  {get: function(){return barColor;}, set: function(_){\n            barColor = _ ? nv.utils.getColor(_) : null;\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.multiBarHorizontalChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var multibar = nv.models.multiBarHorizontal()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , legend = nv.models.legend().height(30)\n        , controls = nv.models.legend().height(30)\n        , tooltip = nv.models.tooltip()\n        ;\n\n    var margin = {top: 30, right: 20, bottom: 50, left: 60}\n        , marginTop = null\n        , width = null\n        , height = null\n        , color = nv.utils.defaultColor()\n        , showControls = true\n        , controlLabels = {}\n        , showLegend = true\n        , showXAxis = true\n        , showYAxis = true\n        , stacked = false\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , dispatch = d3.dispatch('stateChange', 'changeState','renderEnd')\n        , controlWidth = function() { return showControls ? 180 : 0 }\n        , duration = 250\n        ;\n\n    state.stacked = false; // DEPRECATED Maintained for backward compatibility\n\n    multibar.stacked(stacked);\n\n    xAxis\n        .orient('left')\n        .tickPadding(5)\n        .showMaxMin(false)\n        .tickFormat(function(d) { return d })\n    ;\n    yAxis\n        .orient('bottom')\n        .tickFormat(d3.format(',.1f'))\n    ;\n\n    tooltip\n        .duration(0)\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        })\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        });\n\n    controls.updateState(false);\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled }),\n                stacked: stacked\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.stacked !== undefined)\n                stacked = state.stacked;\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(multibar);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this),\n                
that = this;\n            nv.utils.initSVG(container);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() { container.transition().duration(duration).call(chart) };\n            chart.container = this;\n\n            stacked = multibar.stacked();\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disableddisabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = multibar.xScale();\n            y = multibar.yScale().clamp(true);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-multiBarHorizontalChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-multiBarHorizontalChart').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis')\n                .append('g').attr('class', 'nv-zeroLine')\n                .append('line');\n            gEnter.append('g').attr('class', 'nv-barsWrap');\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n            gEnter.append('g').attr('class', 'nv-controlsWrap');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                legend.width(availableWidth - controlWidth());\n\n                g.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                g.select('.nv-legendWrap')\n                    .attr('transform', 'translate(' + controlWidth() + ',' + (-margin.top) +')');\n            }\n\n            // Controls\n            if (!showControls) {\n                 g.select('.nv-controlsWrap').selectAll('*').remove();\n            } else {\n                var controlsData = [\n                    { key: controlLabels.grouped || 'Grouped', disabled: multibar.stacked() },\n                    { key: controlLabels.stacked || 'Stacked', disabled: !multibar.stacked() }\n                ];\n\n                controls.width(controlWidth()).color(['#444', '#444', '#444']);\n                g.select('.nv-controlsWrap')\n                    
.datum(controlsData)\n                    .attr('transform', 'translate(0,' + (-margin.top) +')')\n                    .call(controls);\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Main Chart Component(s)\n            multibar\n                .disabled(data.map(function(series) { return series.disabled }))\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled }));\n\n            var barsWrap = g.select('.nv-barsWrap')\n                .datum(data.filter(function(d) { return !d.disabled }));\n\n            barsWrap.transition().call(multibar);\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/24, data) )\n                    .tickSize(-availableWidth, 0);\n\n                g.select('.nv-x.nv-axis').call(xAxis);\n\n                var xTicks = g.select('.nv-x.nv-axis').selectAll('g');\n\n                xTicks\n                    .selectAll('line, text');\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize( -availableHeight, 0);\n\n                g.select('.nv-y.nv-axis')\n                    .attr('transform', 'translate(0,' + availableHeight + ')');\n                g.select('.nv-y.nv-axis').call(yAxis);\n            }\n\n            // Zero line\n            g.select(\".nv-zeroLine line\")\n                .attr(\"x1\", y(0))\n                .attr(\"x2\", y(0))\n                .attr(\"y1\", 0)\n                .attr(\"y2\", -availableHeight)\n            ;\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            controls.dispatch.on('legendClick', function(d,i) {\n                if (!d.disabled) return;\n                controlsData = controlsData.map(function(s) {\n                    s.disabled = true;\n                    return s;\n                });\n                d.disabled = false;\n\n                switch (d.key) {\n                    case 'Grouped':\n                    case controlLabels.grouped:\n                        multibar.stacked(false);\n                        break;\n                    case 'Stacked':\n                    case controlLabels.stacked:\n                        multibar.stacked(true);\n                        break;\n                }\n\n                state.stacked = multibar.stacked();\n                dispatch.stateChange(state);\n                stacked = multibar.stacked();\n\n                chart.update();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n\n                if (typeof e.disabled !== 'undefined') {\n                    
data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n\n                    state.disabled = e.disabled;\n                }\n\n                if (typeof e.stacked !== 'undefined') {\n                    multibar.stacked(e.stacked);\n                    state.stacked = e.stacked;\n                    stacked = e.stacked;\n                }\n\n                chart.update();\n            });\n        });\n        renderWatch.renderEnd('multibar horizontal chart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    multibar.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt.value = chart.x()(evt.data);\n        evt['series'] = {\n            key: evt.data.key,\n            value: chart.y()(evt.data),\n            color: evt.color\n        };\n        tooltip.data(evt).hidden(false);\n    });\n\n    multibar.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    multibar.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.multibar = multibar;\n    chart.legend = legend;\n    chart.controls = controls;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.state = state;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        showControls: {get: function(){return showControls;}, set: function(_){showControls=_;}},\n        controlLabels: {get: function(){return controlLabels;}, set: function(_){controlLabels=_;}},\n        showXAxis:      {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            multibar.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n        }},\n        barColor:  {get: function(){return multibar.barColor;}, set: function(_){\n            multibar.barColor(_);\n            legend.color(function(d,i) {return d3.rgb('#ccc').darker(i * 1.5).toString();})\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, multibar);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","nv.models.multiChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 30, right: 20, bottom: 50, left: 60},\n        marginTop = null,\n        color = nv.utils.defaultColor(),\n        width = null,\n        height = null,\n        showLegend = true,\n        noData = null,\n        yDomain1,\n        yDomain2,\n        getX = function(d) { return d.x },\n        getY = function(d) { return d.y},\n        interpolate = 'linear',\n        useVoronoi = true,\n        interactiveLayer = nv.interactiveGuideline(),\n        useInteractiveGuideline = false,\n        legendRightAxisHint = ' (right axis)',\n        duration = 250\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x = d3.scale.linear(),\n        yScale1 = d3.scale.linear(),\n        yScale2 = d3.scale.linear(),\n\n        lines1 = nv.models.line().yScale(yScale1).duration(duration),\n        lines2 = nv.models.line().yScale(yScale2).duration(duration),\n\n        scatters1 = nv.models.scatter().yScale(yScale1).duration(duration),\n        scatters2 = nv.models.scatter().yScale(yScale2).duration(duration),\n\n        bars1 = nv.models.multiBar().stacked(false).yScale(yScale1).duration(duration),\n        bars2 = nv.models.multiBar().stacked(false).yScale(yScale2).duration(duration),\n\n        stack1 = nv.models.stackedArea().yScale(yScale1).duration(duration),\n        stack2 = nv.models.stackedArea().yScale(yScale2).duration(duration),\n\n        xAxis = nv.models.axis().scale(x).orient('bottom').tickPadding(5).duration(duration),\n        yAxis1 = nv.models.axis().scale(yScale1).orient('left').duration(duration),\n        yAxis2 = nv.models.axis().scale(yScale2).orient('right').duration(duration),\n\n        legend = nv.models.legend().height(30),\n        tooltip = nv.models.tooltip(),\n        dispatch = d3.dispatch();\n\n    var charts = [lines1, lines2, scatters1, scatters2, bars1, bars2, stack1, stack2];\n\n    function chart(selection) {\n        selection.each(function(data) {\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n\n            chart.update = function() { container.transition().call(chart); };\n            chart.container = this;\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            
var dataLines1 = data.filter(function(d) {return d.type == 'line' && d.yAxis == 1});\n            var dataLines2 = data.filter(function(d) {return d.type == 'line' && d.yAxis == 2});\n            var dataScatters1 = data.filter(function(d) {return d.type == 'scatter' && d.yAxis == 1});\n            var dataScatters2 = data.filter(function(d) {return d.type == 'scatter' && d.yAxis == 2});\n            var dataBars1 =  data.filter(function(d) {return d.type == 'bar'  && d.yAxis == 1});\n            var dataBars2 =  data.filter(function(d) {return d.type == 'bar'  && d.yAxis == 2});\n            var dataStack1 = data.filter(function(d) {return d.type == 'area' && d.yAxis == 1});\n            var dataStack2 = data.filter(function(d) {return d.type == 'area' && d.yAxis == 2});\n\n            // Display noData message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container);\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            var series1 = data.filter(function(d) {return !d.disabled && d.yAxis == 1})\n                .map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d), y: getY(d) }\n                    })\n                });\n\n            var series2 = data.filter(function(d) {return !d.disabled && d.yAxis == 2})\n                .map(function(d) {\n                    return d.values.map(function(d,i) {\n                        return { x: getX(d), y: getY(d) }\n                    })\n                });\n\n            x   .domain(d3.extent(d3.merge(series1.concat(series2)), function(d) { return d.x }))\n                .range([0, availableWidth]);\n\n            var wrap = container.selectAll('g.wrap.multiChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'wrap nvd3 multiChart').append('g');\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y1 nv-axis');\n            gEnter.append('g').attr('class', 'nv-y2 nv-axis');\n            gEnter.append('g').attr('class', 'stack1Wrap');\n            gEnter.append('g').attr('class', 'stack2Wrap');\n            gEnter.append('g').attr('class', 'bars1Wrap');\n            gEnter.append('g').attr('class', 'bars2Wrap');\n            gEnter.append('g').attr('class', 'scatters1Wrap');\n            gEnter.append('g').attr('class', 'scatters2Wrap');\n            gEnter.append('g').attr('class', 'lines1Wrap');\n            gEnter.append('g').attr('class', 'lines2Wrap');\n            gEnter.append('g').attr('class', 'legendWrap');\n            gEnter.append('g').attr('class', 'nv-interactive');\n\n            var g = wrap.select('g');\n\n            var color_array = data.map(function(d,i) {\n                return data[i].color || color(d, i);\n            });\n\n            // Legend\n            if (!showLegend) {\n                g.select('.legendWrap').selectAll('*').remove();\n            } else {\n                var legendWidth = legend.align() ? availableWidth / 2 : availableWidth;\n                var legendXPosition = legend.align() ? 
legendWidth : 0;\n\n                legend.width(legendWidth);\n                legend.color(color_array);\n\n                g.select('.legendWrap')\n                    .datum(data.map(function(series) {\n                        series.originalKey = series.originalKey === undefined ? series.key : series.originalKey;\n                        series.key = series.originalKey + (series.yAxis == 1 ? '' : legendRightAxisHint);\n                        return series;\n                    }))\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                g.select('.legendWrap')\n                    .attr('transform', 'translate(' + legendXPosition + ',' + (-margin.top) +')');\n            }\n\n            lines1\n                .width(availableWidth)\n                .height(availableHeight)\n                .interpolate(interpolate)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'line'}));\n            lines2\n                .width(availableWidth)\n                .height(availableHeight)\n                .interpolate(interpolate)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'line'}));\n            scatters1\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'scatter'}));\n            scatters2\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'scatter'}));\n            bars1\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'bar'}));\n            bars2\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'bar'}));\n            stack1\n                .width(availableWidth)\n                .height(availableHeight)\n                .interpolate(interpolate)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 1 && data[i].type == 'area'}));\n            stack2\n                .width(availableWidth)\n                .height(availableHeight)\n                .interpolate(interpolate)\n                .color(color_array.filter(function(d,i) { return !data[i].disabled && data[i].yAxis == 2 && data[i].type == 'area'}));\n\n            g.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            var lines1Wrap = g.select('.lines1Wrap')\n                .datum(dataLines1.filter(function(d){return !d.disabled}));\n            var scatters1Wrap = g.select('.scatters1Wrap')\n                .datum(dataScatters1.filter(function(d){return !d.disabled}));\n            var bars1Wrap = g.select('.bars1Wrap')\n                .datum(dataBars1.filter(function(d){return !d.disabled}));\n            var stack1Wrap = g.select('.stack1Wrap')\n     
           .datum(dataStack1.filter(function(d){return !d.disabled}));\n            var lines2Wrap = g.select('.lines2Wrap')\n                .datum(dataLines2.filter(function(d){return !d.disabled}));\n            var scatters2Wrap = g.select('.scatters2Wrap')\n                .datum(dataScatters2.filter(function(d){return !d.disabled}));\n            var bars2Wrap = g.select('.bars2Wrap')\n                .datum(dataBars2.filter(function(d){return !d.disabled}));\n            var stack2Wrap = g.select('.stack2Wrap')\n                .datum(dataStack2.filter(function(d){return !d.disabled}));\n\n            var extraValue1 = dataStack1.length ? dataStack1.map(function(a){return a.values}).reduce(function(a,b){\n                return a.map(function(aVal,i){return {x: aVal.x, y: aVal.y + b[i].y}})\n            }).concat([{x:0, y:0}]) : [];\n            var extraValue2 = dataStack2.length ? dataStack2.map(function(a){return a.values}).reduce(function(a,b){\n                return a.map(function(aVal,i){return {x: aVal.x, y: aVal.y + b[i].y}})\n            }).concat([{x:0, y:0}]) : [];\n\n            yScale1 .domain(yDomain1 || d3.extent(d3.merge(series1).concat(extraValue1), function(d) { return d.y } ))\n                .range([0, availableHeight]);\n\n            yScale2 .domain(yDomain2 || d3.extent(d3.merge(series2).concat(extraValue2), function(d) { return d.y } ))\n                .range([0, availableHeight]);\n\n            lines1.yDomain(yScale1.domain());\n            scatters1.yDomain(yScale1.domain());\n            bars1.yDomain(yScale1.domain());\n            stack1.yDomain(yScale1.domain());\n\n            lines2.yDomain(yScale2.domain());\n            scatters2.yDomain(yScale2.domain());\n            bars2.yDomain(yScale2.domain());\n            stack2.yDomain(yScale2.domain());\n\n            if(dataStack1.length){d3.transition(stack1Wrap).call(stack1);}\n            if(dataStack2.length){d3.transition(stack2Wrap).call(stack2);}\n\n            if(dataBars1.length){d3.transition(bars1Wrap).call(bars1);}\n            if(dataBars2.length){d3.transition(bars2Wrap).call(bars2);}\n\n            if(dataLines1.length){d3.transition(lines1Wrap).call(lines1);}\n            if(dataLines2.length){d3.transition(lines2Wrap).call(lines2);}\n\n            if(dataScatters1.length){d3.transition(scatters1Wrap).call(scatters1);}\n            if(dataScatters2.length){d3.transition(scatters2Wrap).call(scatters2);}\n\n            xAxis\n                ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                .tickSize(-availableHeight, 0);\n\n            g.select('.nv-x.nv-axis')\n                .attr('transform', 'translate(0,' + availableHeight + ')');\n            d3.transition(g.select('.nv-x.nv-axis'))\n                .call(xAxis);\n\n            yAxis1\n                ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                .tickSize( -availableWidth, 0);\n\n\n            d3.transition(g.select('.nv-y1.nv-axis'))\n                .call(yAxis1);\n\n            yAxis2\n                ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                .tickSize( -availableWidth, 0);\n\n            d3.transition(g.select('.nv-y2.nv-axis'))\n                .call(yAxis2);\n\n            g.select('.nv-y1.nv-axis')\n                .classed('nv-disabled', series1.length ? 
false : true)\n                .attr('transform', 'translate(' + x.range()[0] + ',0)');\n\n            g.select('.nv-y2.nv-axis')\n                .classed('nv-disabled', series2.length ? false : true)\n                .attr('transform', 'translate(' + x.range()[1] + ',0)');\n\n            legend.dispatch.on('stateChange', function(newState) {\n                chart.update();\n            });\n\n            if(useInteractiveGuideline){\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left:margin.left, top:margin.top})\n                    .svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n\n            //============================================================\n            // Event Handling/Dispatching\n            //------------------------------------------------------------\n\n            function mouseover_line(evt) {\n                var yaxis = data[evt.seriesIndex].yAxis === 2 ? yAxis2 : yAxis1;\n                evt.value = evt.point.x;\n                evt.series = {\n                    value: evt.point.y,\n                    color: evt.point.color,\n                    key: evt.series.key\n                };\n                tooltip\n                    .duration(0)\n                    .headerFormatter(function(d, i) {\n                    \treturn xAxis.tickFormat()(d, i);\n                    })\n                    .valueFormatter(function(d, i) {\n                        return yaxis.tickFormat()(d, i);\n                    })\n                    .data(evt)\n                    .hidden(false);\n            }\n\n            function mouseover_scatter(evt) {\n                var yaxis = data[evt.seriesIndex].yAxis === 2 ? yAxis2 : yAxis1;\n                evt.value = evt.point.x;\n                evt.series = {\n                    value: evt.point.y,\n                    color: evt.point.color,\n                    key: evt.series.key\n                };\n                tooltip\n                    .duration(100)\n                    .headerFormatter(function(d, i) {\n                    \treturn xAxis.tickFormat()(d, i);\n                    })\n                    .valueFormatter(function(d, i) {\n                        return yaxis.tickFormat()(d, i);\n                    })\n                    .data(evt)\n                    .hidden(false);\n            }\n\n            function mouseover_stack(evt) {\n                var yaxis = data[evt.seriesIndex].yAxis === 2 ? yAxis2 : yAxis1;\n                evt.point['x'] = stack1.x()(evt.point);\n                evt.point['y'] = stack1.y()(evt.point);\n                tooltip\n                    .duration(0)\n                    .headerFormatter(function(d, i) {\n                    \treturn xAxis.tickFormat()(d, i);\n                    })\n                    .valueFormatter(function(d, i) {\n                        return yaxis.tickFormat()(d, i);\n                    })\n                    .data(evt)\n                    .hidden(false);\n            }\n\n            function mouseover_bar(evt) {\n                var yaxis = data[evt.data.series].yAxis === 2 ? 
yAxis2 : yAxis1;\n\n                evt.value = bars1.x()(evt.data);\n                evt['series'] = {\n                    value: bars1.y()(evt.data),\n                    color: evt.color,\n                    key: evt.data.key\n                };\n                tooltip\n                    .duration(0)\n                    .headerFormatter(function(d, i) {\n                    \treturn xAxis.tickFormat()(d, i);\n                    })\n                    .valueFormatter(function(d, i) {\n                        return yaxis.tickFormat()(d, i);\n                    })\n                    .data(evt)\n                    .hidden(false);\n            }\n\n\n\n            function clearHighlights() {\n              for(var i=0, il=charts.length; i < il; i++){\n                var chart = charts[i];\n                try {\n                  chart.clearHighlights();\n                } catch(e){}\n              }\n            }\n\n            function highlightPoint(serieIndex, pointIndex, b){\n              for(var i=0, il=charts.length; i < il; i++){\n                var chart = charts[i];\n                try {\n                  chart.highlightPoint(serieIndex, pointIndex, b);\n                } catch(e){}\n              }\n            }\n\n            if(useInteractiveGuideline){\n                interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                    clearHighlights();\n                    var singlePoint, pointIndex, pointXLocation, allData = [];\n                    data\n                    .filter(function(series, i) {\n                        series.seriesIndex = i;\n                        return !series.disabled;\n                    })\n                    .forEach(function(series,i) {\n                        var extent = x.domain();\n                        var currentValues = series.values.filter(function(d,i) {\n                            return chart.x()(d,i) >= extent[0] && chart.x()(d,i) <= extent[1];\n                        });\n\n                        pointIndex = nv.interactiveBisect(currentValues, e.pointXValue, chart.x());\n                        var point = currentValues[pointIndex];\n                        var pointYValue = chart.y()(point, pointIndex);\n                        if (pointYValue !== null) {\n                            highlightPoint(i, pointIndex, true);\n                        }\n                        if (point === undefined) return;\n                        if (singlePoint === undefined) singlePoint = point;\n                        if (pointXLocation === undefined) pointXLocation = x(chart.x()(point,pointIndex));\n                        allData.push({\n                            key: series.key,\n                            value: pointYValue,\n                            color: color(series,series.seriesIndex),\n                            data: point,\n                            yAxis: series.yAxis == 2 ? yAxis2 : yAxis1\n                        });\n                    });\n\n                    var defaultValueFormatter = function(d,i) {\n                        var yAxis = allData[i].yAxis;\n                        return d == null ? 
\"N/A\" : yAxis.tickFormat()(d);\n                    };\n\n                    interactiveLayer.tooltip\n                        .headerFormatter(function(d, i) {\n                            return xAxis.tickFormat()(d, i);\n                        })\n                        .valueFormatter(interactiveLayer.tooltip.valueFormatter() || defaultValueFormatter)\n                        .data({\n                            value: chart.x()( singlePoint,pointIndex ),\n                            index: pointIndex,\n                            series: allData\n                        })();\n\n                    interactiveLayer.renderGuideLine(pointXLocation);\n                });\n\n                interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                    clearHighlights();\n                });\n            } else {\n                lines1.dispatch.on('elementMouseover.tooltip', mouseover_line);\n                lines2.dispatch.on('elementMouseover.tooltip', mouseover_line);\n                lines1.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n                lines2.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n\n                scatters1.dispatch.on('elementMouseover.tooltip', mouseover_scatter);\n                scatters2.dispatch.on('elementMouseover.tooltip', mouseover_scatter);\n                scatters1.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n                scatters2.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n\n                stack1.dispatch.on('elementMouseover.tooltip', mouseover_stack);\n                stack2.dispatch.on('elementMouseover.tooltip', mouseover_stack);\n                stack1.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n                stack2.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true)\n                });\n\n                bars1.dispatch.on('elementMouseover.tooltip', mouseover_bar);\n                bars2.dispatch.on('elementMouseover.tooltip', mouseover_bar);\n\n                bars1.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true);\n                });\n                bars2.dispatch.on('elementMouseout.tooltip', function(evt) {\n                    tooltip.hidden(true);\n                });\n                bars1.dispatch.on('elementMousemove.tooltip', function(evt) {\n                    tooltip();\n                });\n                bars2.dispatch.on('elementMousemove.tooltip', function(evt) {\n                    tooltip();\n                });\n            }\n        });\n\n        return chart;\n    }\n\n    //============================================================\n    // Global getters and setters\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.legend = legend;\n    chart.lines1 = lines1;\n    chart.lines2 = lines2;\n    chart.scatters1 = scatters1;\n    chart.scatters2 = scatters2;\n    chart.bars1 = bars1;\n    chart.bars2 = bars2;\n    chart.stack1 = stack1;\n    chart.stack2 = stack2;\n    chart.xAxis = xAxis;\n    chart.yAxis1 = yAxis1;\n    chart.yAxis2 = yAxis2;\n    chart.tooltip = 
tooltip;\n    chart.interactiveLayer = interactiveLayer;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        yDomain1:      {get: function(){return yDomain1;}, set: function(_){yDomain1=_;}},\n        yDomain2:    {get: function(){return yDomain2;}, set: function(_){yDomain2=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        interpolate:    {get: function(){return interpolate;}, set: function(_){interpolate=_;}},\n        legendRightAxisHint:    {get: function(){return legendRightAxisHint;}, set: function(_){legendRightAxisHint=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        x: {get: function(){return getX;}, set: function(_){\n            getX = _;\n            lines1.x(_);\n            lines2.x(_);\n            scatters1.x(_);\n            scatters2.x(_);\n            bars1.x(_);\n            bars2.x(_);\n            stack1.x(_);\n            stack2.x(_);\n        }},\n        y: {get: function(){return getY;}, set: function(_){\n            getY = _;\n            lines1.y(_);\n            lines2.y(_);\n            scatters1.y(_);\n            scatters2.y(_);\n            stack1.y(_);\n            stack2.y(_);\n            bars1.y(_);\n            bars2.y(_);\n        }},\n        useVoronoi: {get: function(){return useVoronoi;}, set: function(_){\n            useVoronoi=_;\n            lines1.useVoronoi(_);\n            lines2.useVoronoi(_);\n            stack1.useVoronoi(_);\n            stack2.useVoronoi(_);\n        }},\n\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = _;\n            if (useInteractiveGuideline) {\n                lines1.interactive(false);\n                lines1.useVoronoi(false);\n                lines2.interactive(false);\n                lines2.useVoronoi(false);\n                stack1.interactive(false);\n                stack1.useVoronoi(false);\n                stack2.interactive(false);\n                stack2.useVoronoi(false);\n                scatters1.interactive(false);\n                scatters2.interactive(false);\n            }\n        }},\n\n        duration: {get: function(){return duration;}, set: function(_) {\n            duration = _;\n            [lines1, lines2, stack1, stack2, scatters1, scatters2, xAxis, yAxis1, yAxis2].forEach(function(model){\n              model.duration(duration);\n            });\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.ohlcBar = function() {\n    \"use 
strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = null\n        , height = null\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , x = d3.scale.linear()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , getOpen = function(d) { return d.open }\n        , getClose = function(d) { return d.close }\n        , getHigh = function(d) { return d.high }\n        , getLow = function(d) { return d.low }\n        , forceX = []\n        , forceY = []\n        , padData     = false // If true, adds half a data points width to front and back, for lining up a line chart with a bar chart\n        , clipEdge = true\n        , color = nv.utils.defaultColor()\n        , interactive = false\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , dispatch = d3.dispatch('stateChange', 'changeState', 'renderEnd', 'chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    function chart(selection) {\n        selection.each(function(data) {\n            container = d3.select(this);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            nv.utils.initSVG(container);\n\n            // ohlc bar width.\n            var w = (availableWidth / data[0].values.length) * .9;\n\n            // Setup Scales\n            x.domain(xDomain || d3.extent(data[0].values.map(getX).concat(forceX) ));\n\n            if (padData)\n                x.range(xRange || [availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);\n            else\n                x.range(xRange || [5 + w/2, availableWidth - w/2 - 5]);\n\n            y.domain(yDomain || [\n                    d3.min(data[0].values.map(getLow).concat(forceY)),\n                    d3.max(data[0].values.map(getHigh).concat(forceY))\n                ]\n            ).range(yRange || [availableHeight, 0]);\n\n            // If scale's domain don't have a range, slightly adjust to make one... 
so a chart can show a single data point\n            if (x.domain()[0] === x.domain()[1])\n                x.domain()[0] ?\n                    x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])\n                    : x.domain([-1,1]);\n\n            if (y.domain()[0] === y.domain()[1])\n                y.domain()[0] ?\n                    y.domain([y.domain()[0] + y.domain()[0] * 0.01, y.domain()[1] - y.domain()[1] * 0.01])\n                    : y.domain([-1,1]);\n\n            // Setup containers and skeleton of chart\n            var wrap = d3.select(this).selectAll('g.nv-wrap.nv-ohlcBar').data([data[0].values]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-ohlcBar');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-ticks');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            container\n                .on('click', function(d,i) {\n                    dispatch.chartClick({\n                        data: d,\n                        index: i,\n                        pos: d3.event,\n                        id: id\n                    });\n                });\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-chart-clip-path-' + id)\n                .append('rect');\n\n            wrap.select('#nv-chart-clip-path-' + id + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            g   .attr('clip-path', clipEdge ? 'url(#nv-chart-clip-path-' + id + ')' : '');\n\n            var ticks = wrap.select('.nv-ticks').selectAll('.nv-tick')\n                .data(function(d) { return d });\n            ticks.exit().remove();\n\n            ticks.enter().append('path')\n                .attr('class', function(d,i,j) { return (getOpen(d,i) > getClose(d,i) ? 'nv-tick negative' : 'nv-tick positive') + ' nv-tick-' + j + '-' + i })\n                .attr('d', function(d,i) {\n                    return 'm0,0l0,'\n                        + (y(getOpen(d,i))\n                            - y(getHigh(d,i)))\n                        + 'l'\n                        + (-w/2)\n                        + ',0l'\n                        + (w/2)\n                        + ',0l0,'\n                        + (y(getLow(d,i)) - y(getOpen(d,i)))\n                        + 'l0,'\n                        + (y(getClose(d,i))\n                            - y(getLow(d,i)))\n                        + 'l'\n                        + (w/2)\n                        + ',0l'\n                        + (-w/2)\n                        + ',0z';\n                })\n                .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',' + y(getHigh(d,i)) + ')'; })\n                .attr('fill', function(d,i) { return color[0]; })\n                .attr('stroke', function(d,i) { return color[0]; })\n                .attr('x', 0 )\n                .attr('y', function(d,i) {  return y(Math.max(0, getY(d,i))) })\n                .attr('height', function(d,i) { return Math.abs(y(getY(d,i)) - y(0)) });\n\n            // the bar colors are controlled by CSS currently\n            ticks.attr('class', function(d,i,j) {\n                return (getOpen(d,i) > getClose(d,i) ? 
'nv-tick negative' : 'nv-tick positive') + ' nv-tick-' + j + '-' + i;\n            });\n\n            d3.transition(ticks)\n                .attr('transform', function(d,i) { return 'translate(' + x(getX(d,i)) + ',' + y(getHigh(d,i)) + ')'; })\n                .attr('d', function(d,i) {\n                    var w = (availableWidth / data[0].values.length) * .9;\n                    return 'm0,0l0,'\n                        + (y(getOpen(d,i))\n                            - y(getHigh(d,i)))\n                        + 'l'\n                        + (-w/2)\n                        + ',0l'\n                        + (w/2)\n                        + ',0l0,'\n                        + (y(getLow(d,i))\n                            - y(getOpen(d,i)))\n                        + 'l0,'\n                        + (y(getClose(d,i))\n                            - y(getLow(d,i)))\n                        + 'l'\n                        + (w/2)\n                        + ',0l'\n                        + (-w/2)\n                        + ',0z';\n                });\n        });\n\n        return chart;\n    }\n\n\n    //Create methods to allow outside functions to highlight a specific bar.\n    chart.highlightPoint = function(pointIndex, isHoverOver) {\n        chart.clearHighlights();\n        container.select(\".nv-ohlcBar .nv-tick-0-\" + pointIndex)\n            .classed(\"hover\", isHoverOver)\n        ;\n    };\n\n    chart.clearHighlights = function() {\n        container.select(\".nv-ohlcBar .nv-tick.hover\")\n            .classed(\"hover\", false)\n        ;\n    };\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:    {get: function(){return width;}, set: function(_){width=_;}},\n        height:   {get: function(){return height;}, set: function(_){height=_;}},\n        xScale:   {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:   {get: function(){return y;}, set: function(_){y=_;}},\n        xDomain:  {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain:  {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:   {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:   {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        forceX:   {get: function(){return forceX;}, set: function(_){forceX=_;}},\n        forceY:   {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        padData:  {get: function(){return padData;}, set: function(_){padData=_;}},\n        clipEdge: {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        id:       {get: function(){return id;}, set: function(_){id=_;}},\n        interactive: {get: function(){return interactive;}, set: function(_){interactive=_;}},\n\n        x:     {get: function(){return getX;}, set: function(_){getX=_;}},\n        y:     {get: function(){return getY;}, set: function(_){getY=_;}},\n        open:  {get: function(){return getOpen();}, set: function(_){getOpen=_;}},\n        close: {get: function(){return getClose();}, set: function(_){getClose=_;}},\n        high:  {get: function(){return getHigh;}, set: function(_){getHigh=_;}},\n        low:   {get: function(){return 
getLow;}, set: function(_){getLow=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    != undefined ? _.top    : margin.top;\n            margin.right  = _.right  != undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom != undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   != undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","// Code adapted from Jason Davies' \"Parallel Coordinates\"\n// http://bl.ocks.org/jasondavies/1341281\nnv.models.parallelCoordinates = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 30, right: 0, bottom: 10, left: 0}\n        , width = null\n        , height = null\n        , availableWidth = null\n        , availableHeight = null\n        , x = d3.scale.ordinal()\n        , y = {}\n        , undefinedValuesLabel = \"undefined values\"\n        , dimensionData = []\n        , enabledDimensions = []\n        , dimensionNames = []\n        , displayBrush = true\n        , color = nv.utils.defaultColor()\n        , filters = []\n        , active = []\n        , dragging = []\n        , axisWithUndefinedValues = []\n        , lineTension = 1\n        , foreground\n        , background\n        , dimensions\n        , line = d3.svg.line()\n        , axis = d3.svg.axis()\n        , dispatch = d3.dispatch('brushstart', 'brush', 'brushEnd', 'dimensionsOrder', \"stateChange\", 'elementClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd', 'activeChanged')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var container = d3.select(this);\n            availableWidth = nv.utils.availableWidth(width, container, margin);\n            availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            nv.utils.initSVG(container);\n\n           //Convert old data to new format (name, values)\n            if (data[0].values === undefined) {\n                var newData = [];\n                data.forEach(function (d) {\n                        var val = {};\n                        var key = Object.keys(d);\n                        key.forEach(function (k) { if (k !== \"name\") val[k] = d[k] });\n                        newData.push({ key: d.name, values: val });\n                });\n                data = newData;\n            }\n\n            var dataValues = data.map(function (d) {return d.values});\n            if (active.length === 0) {\n                active = data;\n            }; //set all active before first brush call\n            \n            dimensionNames = dimensionData.sort(function (a, b) { return a.currentPosition - b.currentPosition; }).map(function (d) { return d.key });\n            enabledDimensions = dimensionData.filter(function (d) { return !d.disabled; });\n            \n 
           // Setup Scales\n            x.rangePoints([0, availableWidth], 1).domain(enabledDimensions.map(function (d) { return d.key; }));\n\n            //Set as true if all values on an axis are missing.\n            // Extract the list of dimensions and create a scale for each.\n            var oldDomainMaxValue = {};\n            var displayMissingValuesline = false;\n            var currentTicks = [];\n            \n            dimensionNames.forEach(function(d) {\n                var extent = d3.extent(dataValues, function (p) { return +p[d]; });\n                var min = extent[0];\n                var max = extent[1];\n                var onlyUndefinedValues = false;\n                //If there is no values to display on an axis, set the extent to 0\n                if (isNaN(min) || isNaN(max)) {\n                    onlyUndefinedValues = true;\n                    min = 0;\n                    max = 0;\n                }\n                //Scale axis if there is only one value\n                if (min === max) {\n                    min = min - 1;\n                    max = max + 1;\n                }\n                var f = filters.filter(function (k) { return k.dimension == d; });\n                if (f.length !== 0) {\n                    //If there is only NaN values, keep the existing domain.\n                    if (onlyUndefinedValues) {\n                        min = y[d].domain()[0];\n                        max = y[d].domain()[1];\n                    }\n                        //If the brush extent is > max (< min), keep the extent value.\n                    else if (!f[0].hasOnlyNaN && displayBrush) {\n                        min = min > f[0].extent[0] ? f[0].extent[0] : min;\n                        max = max < f[0].extent[1] ? f[0].extent[1] : max;\n                    }\n                        //If there is NaN values brushed be sure the brush extent is on the domain.\n                    else if (f[0].hasNaN) {\n                        max = max < f[0].extent[1] ? 
f[0].extent[1] : max;\n                        oldDomainMaxValue[d] = y[d].domain()[1];\n                        displayMissingValuesline = true;\n                    }\n                }\n                //Use 90% of (availableHeight - 12) for the axis range, 12 reprensenting the space necessary to display \"undefined values\" text.\n                //The remaining 10% are used to display the missingValue line.\n                y[d] = d3.scale.linear()\n                    .domain([min, max])\n                    .range([(availableHeight - 12) * 0.9, 0]);\n\n                axisWithUndefinedValues = [];\n                y[d].brush = d3.svg.brush().y(y[d]).on('brushstart', brushstart).on('brush', brush).on('brushend', brushend);\n            });\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-parallelCoordinates').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-parallelCoordinates');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-parallelCoordinates background');\n            gEnter.append('g').attr('class', 'nv-parallelCoordinates foreground');\n            gEnter.append('g').attr('class', 'nv-parallelCoordinates missingValuesline');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            line.interpolate('cardinal').tension(lineTension);\n            axis.orient('left');\n            var axisDrag = d3.behavior.drag()\n                        .on('dragstart', dragStart)\n                        .on('drag', dragMove)\n                        .on('dragend', dragEnd);\n\n            //Add missing value line at the bottom of the chart\n            var missingValuesline, missingValueslineText;\n            var step = x.range()[1] - x.range()[0];\n            step = isNaN(step) ? 
x.range()[0] : step;\n            if (!isNaN(step)) {\n                var lineData = [0 + step / 2, availableHeight - 12, availableWidth - step / 2, availableHeight - 12];\n                missingValuesline = wrap.select('.missingValuesline').selectAll('line').data([lineData]);\n                missingValuesline.enter().append('line');\n                missingValuesline.exit().remove();\n                missingValuesline.attr(\"x1\", function(d) { return d[0]; })\n                        .attr(\"y1\", function(d) { return d[1]; })\n                        .attr(\"x2\", function(d) { return d[2]; })\n                        .attr(\"y2\", function(d) { return d[3]; });\n    \n                //Add the text \"undefined values\" under the missing value line\n                missingValueslineText = wrap.select('.missingValuesline').selectAll('text').data([undefinedValuesLabel]);\n                missingValueslineText.append('text').data([undefinedValuesLabel]);\n                missingValueslineText.enter().append('text');\n                missingValueslineText.exit().remove();\n                missingValueslineText.attr(\"y\", availableHeight)\n                        //To have the text right align with the missingValues line, substract 92 representing the text size.\n                        .attr(\"x\", availableWidth - 92 - step / 2)\n                        .text(function(d) { return d; });\n            }\n            // Add grey background lines for context.\n            background = wrap.select('.background').selectAll('path').data(data);\n            background.enter().append('path');\n            background.exit().remove();\n            background.attr('d', path);\n\n            // Add blue foreground lines for focus.\n            foreground = wrap.select('.foreground').selectAll('path').data(data);\n            foreground.enter().append('path')\n            foreground.exit().remove();\n            foreground.attr('d', path)\n                .style(\"stroke-width\", function (d, i) {\n                if (isNaN(d.strokeWidth)) { d.strokeWidth = 1;} return d.strokeWidth;})\n                .attr('stroke', function (d, i) { return d.color || color(d, i); });\n            foreground.on(\"mouseover\", function (d, i) {\n                d3.select(this).classed('hover', true).style(\"stroke-width\", d.strokeWidth + 2 + \"px\").style(\"stroke-opacity\", 1);\n                dispatch.elementMouseover({\n                    label: d.name,\n                    color: d.color || color(d, i),\n                    values: d.values,\n                    dimensions: enabledDimensions\n                });\n\n            });\n            foreground.on(\"mouseout\", function (d, i) {\n                d3.select(this).classed('hover', false).style(\"stroke-width\", d.strokeWidth + \"px\").style(\"stroke-opacity\", 0.7);\n                dispatch.elementMouseout({\n                    label: d.name,\n                    index: i\n                });\n            });\n            foreground.on('mousemove', function (d, i) {\n                dispatch.elementMousemove();\n            });\n            foreground.on('click', function (d) {\n                dispatch.elementClick({\n                    id: d.id\n                });\n            });\n            // Add a group element for each dimension.\n            dimensions = g.selectAll('.dimension').data(enabledDimensions);\n            var dimensionsEnter = dimensions.enter().append('g').attr('class', 'nv-parallelCoordinates dimension');\n\n            
dimensions.attr('transform', function(d) { return 'translate(' + x(d.key) + ',0)'; });\n            dimensionsEnter.append('g').attr('class', 'nv-axis');\n\n            // Add an axis and title.\n            dimensionsEnter.append('text')\n                .attr('class', 'nv-label')\n                .style(\"cursor\", \"move\")\n                .attr('dy', '-1em')\n                .attr('text-anchor', 'middle')\n                .on(\"mouseover\", function(d, i) {\n                    dispatch.elementMouseover({\n                        label: d.tooltip || d.key,\n                        color: d.color \n                    });\n                })\n                .on(\"mouseout\", function(d, i) {\n                    dispatch.elementMouseout({\n                        label: d.tooltip\n                    });\n                })\n                .on('mousemove', function (d, i) {\n                    dispatch.elementMousemove();\n                })\n                .call(axisDrag);\n\n            dimensionsEnter.append('g').attr('class', 'nv-brushBackground');\n            dimensions.exit().remove();\n            dimensions.select('.nv-label').text(function (d) { return d.key });\n\n            // Add and store a brush for each axis.\n            restoreBrush(displayBrush);\n\n            var actives = dimensionNames.filter(function (p) { return !y[p].brush.empty(); }),\n                    extents = actives.map(function (p) { return y[p].brush.extent(); });\n            var formerActive = active.slice(0);\n\n            //Restore active values\n            active = [];\n            foreground.style(\"display\", function (d) {\n                var isActive = actives.every(function (p, i) {\n                    if ((isNaN(d.values[p]) || isNaN(parseFloat(d.values[p]))) && extents[i][0] == y[p].brush.y().domain()[0]) {\n                        return true;\n                    }\n                    return (extents[i][0] <= d.values[p] && d.values[p] <= extents[i][1]) && !isNaN(parseFloat(d.values[p]));\n                });\n                if (isActive)\n                    active.push(d);\n                return !isActive ? 
\"none\" : null;\n\n            });\n\n            if (filters.length > 0 || !nv.utils.arrayEquals(active, formerActive)) {\n               dispatch.activeChanged(active);\n            }\n\n            // Returns the path for a given data point.\n            function path(d) {\n                return line(enabledDimensions.map(function (p) {\n                    //If value if missing, put the value on the missing value line\n                    if (isNaN(d.values[p.key]) || isNaN(parseFloat(d.values[p.key])) || displayMissingValuesline) {\n                        var domain = y[p.key].domain();\n                        var range = y[p.key].range();\n                        var min = domain[0] - (domain[1] - domain[0]) / 9;\n\n                        //If it's not already the case, allow brush to select undefined values\n                        if (axisWithUndefinedValues.indexOf(p.key) < 0) {\n\n                            var newscale = d3.scale.linear().domain([min, domain[1]]).range([availableHeight - 12, range[1]]);\n                            y[p.key].brush.y(newscale);\n                            axisWithUndefinedValues.push(p.key);\n                        }\n                        if (isNaN(d.values[p.key]) || isNaN(parseFloat(d.values[p.key]))) {\n                            return [x(p.key), y[p.key](min)];\n                        }\n                    }\n\n                    //If parallelCoordinate contain missing values show the missing values line otherwise, hide it.\n                    if (missingValuesline !== undefined) {\n                        if (axisWithUndefinedValues.length > 0 || displayMissingValuesline) {\n                            missingValuesline.style(\"display\", \"inline\");\n                            missingValueslineText.style(\"display\", \"inline\");\n                        } else {\n                            missingValuesline.style(\"display\", \"none\");\n                            missingValueslineText.style(\"display\", \"none\");\n                        }\n                    }\n                    return [x(p.key), y[p.key](d.values[p.key])];\n                }));\n            }\n\n            function restoreBrush(visible) {\n                filters.forEach(function (f) {\n                    //If filter brushed NaN values, keep the brush on the bottom of the axis.\n                    var brushDomain = y[f.dimension].brush.y().domain();\n                    if (f.hasOnlyNaN) {\n                        f.extent[1] = (y[f.dimension].domain()[1] - brushDomain[0]) * (f.extent[1] - f.extent[0]) / (oldDomainMaxValue[f.dimension] - f.extent[0]) + brushDomain[0];\n                    }\n                    if (f.hasNaN) {\n                        f.extent[0] = brushDomain[0];\n                    }\n                    if (visible)\n                        y[f.dimension].brush.extent(f.extent);\n                });\n                \n                dimensions.select('.nv-brushBackground')\n                    .each(function (d) {\n                        d3.select(this).call(y[d.key].brush);\n\n                    })\n                    .selectAll('rect')\n                    .attr('x', -8)\n                    .attr('width', 16);\n                \n                updateTicks();\n            }\n            \n            // Handles a brush event, toggling the display of foreground lines.\n            function brushstart() {\n                //If brush aren't visible, show it before brushing again.\n                if (displayBrush === 
false) {\n                    displayBrush = true;\n                    restoreBrush(true);\n                }\n            }\n            \n            // Handles a brush event, toggling the display of foreground lines.\n            function brush() {\n                actives = dimensionNames.filter(function (p) { return !y[p].brush.empty(); });\n                extents = actives.map(function(p) { return y[p].brush.extent(); });\n\n                filters = []; //erase current filters\n                actives.forEach(function(d,i) {\n                    filters[i] = {\n                        dimension: d,\n                        extent: extents[i],\n                        hasNaN: false,\n                        hasOnlyNaN: false\n                    }\n                });\n\n                active = []; //erase current active list\n                foreground.style('display', function(d) {\n                    var isActive = actives.every(function(p, i) {\n                        if ((isNaN(d.values[p]) || isNaN(parseFloat(d.values[p]))) && extents[i][0] == y[p].brush.y().domain()[0]) return true;\n                        return (extents[i][0] <= d.values[p] && d.values[p] <= extents[i][1]) && !isNaN(parseFloat(d.values[p]));\n                    });\n                    if (isActive) active.push(d);\n                    return isActive ? null : 'none';\n                });\n                \n                updateTicks();\n                \n                dispatch.brush({\n                    filters: filters,\n                    active: active\n                });\n            }\n            function brushend() {\n                var hasActiveBrush = actives.length > 0 ? true : false;\n                filters.forEach(function (f) {\n                    if (f.extent[0] === y[f.dimension].brush.y().domain()[0] && axisWithUndefinedValues.indexOf(f.dimension) >= 0)\n                        f.hasNaN = true;\n                    if (f.extent[1] < y[f.dimension].domain()[0])\n                        f.hasOnlyNaN = true;\n                });\n                dispatch.brushEnd(active, hasActiveBrush);\n            }           \n            function updateTicks() {\n                dimensions.select('.nv-axis')\n                    .each(function (d, i) {\n                        var f = filters.filter(function (k) { return k.dimension == d.key; });\n                        currentTicks[d.key] = y[d.key].domain();\n                        \n                        //If brush are available, display brush extent\n                        if (f.length != 0 && displayBrush)\n                        {\n                            currentTicks[d.key] = [];\n                            if (f[0].extent[1] > y[d.key].domain()[0]) \n                                currentTicks[d.key] = [f[0].extent[1]];\n                            if (f[0].extent[0] >= y[d.key].domain()[0])\n                                currentTicks[d.key].push(f[0].extent[0]);    \n                        }\n                            \n                        d3.select(this).call(axis.scale(y[d.key]).tickFormat(d.format).tickValues(currentTicks[d.key]));\n                });\n            }\n            function dragStart(d) {\n                dragging[d.key] = this.parentNode.__origin__ = x(d.key);\n                background.attr(\"visibility\", \"hidden\");\n            }\n            function dragMove(d) {\n                dragging[d.key] = Math.min(availableWidth, Math.max(0, this.parentNode.__origin__ += d3.event.x));\n      
          foreground.attr(\"d\", path);\n                enabledDimensions.sort(function (a, b) { return dimensionPosition(a.key) - dimensionPosition(b.key); });\n                enabledDimensions.forEach(function (d, i) { return d.currentPosition = i; });\n                x.domain(enabledDimensions.map(function (d) { return d.key; }));\n                dimensions.attr(\"transform\", function(d) { return \"translate(\" + dimensionPosition(d.key) + \")\"; });\n            }\n            function dragEnd(d, i) {\n                delete this.parentNode.__origin__;\n                delete dragging[d.key];\n                d3.select(this.parentNode).attr(\"transform\", \"translate(\" + x(d.key) + \")\");\n                foreground\n                  .attr(\"d\", path);\n                background\n                  .attr(\"d\", path)\n                  .attr(\"visibility\", null);\n\n                dispatch.dimensionsOrder(enabledDimensions);\n            }\n            function dimensionPosition(d) {\n                var v = dragging[d];\n                return v == null ? x(d) : v;\n            }\n        });\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:         {get: function(){return width;},           set: function(_){width= _;}},\n        height:        {get: function(){return height;},          set: function(_){height= _;}},\n        dimensionData: { get: function () { return dimensionData; }, set: function (_) { dimensionData = _; } },\n        displayBrush: { get: function () { return displayBrush; }, set: function (_) { displayBrush = _; } },\n        filters: { get: function () { return filters; }, set: function (_) { filters = _; } },\n        active: { get: function () { return active; }, set: function (_) { active = _; } },\n        lineTension:   {get: function(){return lineTension;},     set: function(_){lineTension = _;}},\n        undefinedValuesLabel : {get: function(){return undefinedValuesLabel;}, set: function(_){undefinedValuesLabel=_;}},\n        \n        // deprecated options\n        dimensions: {get: function () { return dimensionData.map(function (d){return d.key}); }, set: function (_) {\n            // deprecated after 1.8.1\n            nv.deprecated('dimensions', 'use dimensionData instead');\n            if (dimensionData.length === 0) {\n                _.forEach(function (k) { dimensionData.push({ key: k }) })\n            } else {\n                _.forEach(function (k, i) { dimensionData[i].key= k })\n            }\n        }},\n        dimensionNames: {get: function () { return dimensionData.map(function (d){return d.key}); }, set: function (_) {\n            // deprecated after 1.8.1\n            nv.deprecated('dimensionNames', 'use dimensionData instead');\n            dimensionNames = [];\n            if (dimensionData.length === 0) {\n                _.forEach(function (k) { dimensionData.push({ key: k }) })\n            } else {\n                _.forEach(function (k, i) { dimensionData[i].key = k })\n            }\n \n        }},\n        dimensionFormats: {get: function () { return dimensionData.map(function (d) { return d.format }); }, set: function (_) {\n            // deprecated after 
1.8.1\n            nv.deprecated('dimensionFormats', 'use dimensionData instead');\n            if (dimensionData.length === 0) {\n                _.forEach(function (f) { dimensionData.push({ format: f }) })\n            } else {\n                _.forEach(function (f, i) { dimensionData[i].format = f })\n            }\n\n        }},\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    =  _.top    !== undefined ? _.top    : margin.top;\n            margin.right  =  _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom =  _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   =  _.left   !== undefined ? _.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","nv.models.parallelCoordinatesChart = function () {\n        \"use strict\";\n        //============================================================\n        // Public Variables with Default Settings\n        //------------------------------------------------------------\n\n        var parallelCoordinates = nv.models.parallelCoordinates()\n        var legend = nv.models.legend()\n        var tooltip = nv.models.tooltip();\n        var dimensionTooltip = nv.models.tooltip();\n\n        var margin = { top: 0, right: 0, bottom: 0, left: 0 }\n        , marginTop = null\n        , width = null\n        , height = null\n        , showLegend = true\n        , color = nv.utils.defaultColor()\n        , state = nv.utils.state()\n        , dimensionData = []\n        , displayBrush = true\n        , defaultState = null\n        , noData = null\n        , nanValue = \"undefined\"\n        , dispatch = d3.dispatch('dimensionsOrder', 'brushEnd', 'stateChange', 'changeState', 'renderEnd')\n        , controlWidth = function () { return showControls ? 
180 : 0 }\n        ;\n\n\t    //============================================================\n\n\t\t//============================================================\n        // Private Variables\n        //------------------------------------------------------------\n\n        var renderWatch = nv.utils.renderWatch(dispatch);\n\n        var stateGetter = function(data) {\n            return function() {\n                return {\n                    active: data.map(function(d) { return !d.disabled })\n                };\n            }\n        };\n\n        var stateSetter = function(data) {\n            return function(state) {\n                if(state.active !== undefined) {\n                    data.forEach(function(series, i) {\n                        series.disabled = !state.active[i];\n                    });\n                }\n            }\n        };\n\n        tooltip.contentGenerator(function(data) {\n            var str = '<table><thead><tr><td class=\"legend-color-guide\"><div style=\"background-color:' + data.color + '\"></div></td><td><strong>' + data.key + '</strong></td></tr></thead>';\n            if(data.series.length !== 0)\n            {\n                str = str + '<tbody><tr><td height =\"10px\"></td></tr>';\n                data.series.forEach(function(d){\n                    str = str + '<tr><td class=\"legend-color-guide\"><div style=\"background-color:' + d.color + '\"></div></td><td class=\"key\">' + d.key + '</td><td class=\"value\">' + d.value + '</td></tr>';\n                });\n                str = str + '</tbody>';\n            }\n            str = str + '</table>';\n            return str;\n        });\n\n        //============================================================\n        // Chart function\n        //------------------------------------------------------------\n\n        function chart(selection) {\n            renderWatch.reset();\n            renderWatch.models(parallelCoordinates);\n\n            selection.each(function(data) {\n                var container = d3.select(this);\n                nv.utils.initSVG(container);\n\n                var that = this;\n\n                var availableWidth = nv.utils.availableWidth(width, container, margin),\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n\n                chart.update = function() { container.call(chart); };\n                chart.container = this;\n\n                state.setter(stateSetter(dimensionData), chart.update)\n                    .getter(stateGetter(dimensionData))\n                    .update();\n\n                //set state.disabled\n                state.disabled = dimensionData.map(function (d) { return !!d.disabled });\n\n                //Keep dimensions position in memory\n                dimensionData = dimensionData.map(function (d) {d.disabled = !!d.disabled; return d});\n                dimensionData.forEach(function (d, i) {\n                    d.originalPosition = isNaN(d.originalPosition) ? i : d.originalPosition;\n                    d.currentPosition = isNaN(d.currentPosition) ? 
i : d.currentPosition;\n                });\n\n               if (!defaultState) {\n                    var key;\n                    defaultState = {};\n                    for(key in state) {\n                        if(state[key] instanceof Array)\n                            defaultState[key] = state[key].slice(0);\n                        else\n                            defaultState[key] = state[key];\n                    }\n                }\n\n                // Display No Data message if there's nothing to show.\n                if(!data || !data.length) {\n                    nv.utils.noData(chart, container);\n                    return chart;\n                } else {\n                    container.selectAll('.nv-noData').remove();\n                }\n\n                //------------------------------------------------------------\n                // Setup containers and skeleton of chart\n\n                var wrap = container.selectAll('g.nv-wrap.nv-parallelCoordinatesChart').data([data]);\n                var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-parallelCoordinatesChart').append('g');\n\n                var g = wrap.select('g');\n\n                gEnter.append('g').attr('class', 'nv-parallelCoordinatesWrap');\n                gEnter.append('g').attr('class', 'nv-legendWrap');\n\n                g.select(\"rect\")\n                    .attr(\"width\", availableWidth)\n                    .attr(\"height\", (availableHeight > 0) ? availableHeight : 0);\n\n                // Legend\n                if (!showLegend) {\n                    g.select('.nv-legendWrap').selectAll('*').remove();\n                } else {\n                    legend.width(availableWidth)\n                        .color(function (d) { return \"rgb(188,190,192)\"; });\n\n                    g.select('.nv-legendWrap')\n                        .datum(dimensionData.sort(function (a, b) { return a.originalPosition - b.originalPosition; }))\n                        .call(legend);\n\n                    if (!marginTop && legend.height() !== margin.top) {\n                        margin.top = legend.height();\n                        availableHeight = nv.utils.availableHeight(height, container, margin);\n                    }\n                    wrap.select('.nv-legendWrap')\n                       .attr('transform', 'translate( 0 ,' + (-margin.top) + ')');\n                }\n                wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n                // Main Chart Component(s)\n                parallelCoordinates\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .dimensionData(dimensionData)\n                    .displayBrush(displayBrush);\n\n\t\t        var parallelCoordinatesWrap = g.select('.nv-parallelCoordinatesWrap ')\n                  .datum(data);\n\n\t\t        parallelCoordinatesWrap.transition().call(parallelCoordinates);\n\n\t\t\t\t//============================================================\n                // Event Handling/Dispatching (in chart's scope)\n                //------------------------------------------------------------\n                //Display reset brush button\n\t\t        parallelCoordinates.dispatch.on('brushEnd', function (active, hasActiveBrush) {\n\t\t            if (hasActiveBrush) {\n\t\t                displayBrush = true;\n\t\t                dispatch.brushEnd(active);\n\t\t            } else {\n\n\t\t                displayBrush = 
false;\n\t\t            }\n\t\t        });\n\n\t\t        legend.dispatch.on('stateChange', function(newState) {\n\t\t            for(var key in newState) {\n\t\t                state[key] = newState[key];\n\t\t            }\n\t\t            dispatch.stateChange(state);\n\t\t            chart.update();\n\t\t        });\n\n                //Update dimensions order and display reset sorting button\n\t\t        parallelCoordinates.dispatch.on('dimensionsOrder', function (e) {\n\t\t            dimensionData.sort(function (a, b) { return a.currentPosition - b.currentPosition; });\n\t\t            var isSorted = false;\n\t\t            dimensionData.forEach(function (d, i) {\n\t\t                d.currentPosition = i;\n\t\t                if (d.currentPosition !== d.originalPosition)\n\t\t                    isSorted = true;\n\t\t            });\n\t\t            dispatch.dimensionsOrder(dimensionData, isSorted);\n\t\t        });\n\n\t\t\t\t// Update chart from a state object passed to event handler\n                dispatch.on('changeState', function (e) {\n\n                    if (typeof e.disabled !== 'undefined') {\n                        dimensionData.forEach(function (series, i) {\n                            series.disabled = e.disabled[i];\n                        });\n                        state.disabled = e.disabled;\n                    }\n                    chart.update();\n                });\n            });\n\n            renderWatch.renderEnd('parraleleCoordinateChart immediate');\n            return chart;\n        }\n\n\t\t//============================================================\n        // Event Handling/Dispatching (out of chart's scope)\n        //------------------------------------------------------------\n\n        parallelCoordinates.dispatch.on('elementMouseover.tooltip', function (evt) {\n            var tp = {\n                key: evt.label,\n                color: evt.color,\n                series: []\n             }\n            if(evt.values){\n                Object.keys(evt.values).forEach(function (d) {\n                    var dim = evt.dimensions.filter(function (dd) {return dd.key === d;})[0];\n                    if(dim){\n                        var v;\n                        if (isNaN(evt.values[d]) || isNaN(parseFloat(evt.values[d]))) {\n                            v = nanValue;\n                        } else {\n                            v = dim.format(evt.values[d]);\n                        }\n                        tp.series.push({ idx: dim.currentPosition, key: d, value: v, color: dim.color });\n                    }\n                });\n                tp.series.sort(function(a,b) {return a.idx - b.idx});\n             }\n            tooltip.data(tp).hidden(false);\n        });\n\n        parallelCoordinates.dispatch.on('elementMouseout.tooltip', function(evt) {\n            tooltip.hidden(true)\n        });\n\n        parallelCoordinates.dispatch.on('elementMousemove.tooltip', function () {\n            tooltip();\n        });\n\t\t //============================================================\n        // Expose Public Variables\n        //------------------------------------------------------------\n\n\t\t// expose chart's sub-components\n        chart.dispatch = dispatch;\n        chart.parallelCoordinates = parallelCoordinates;\n        chart.legend = legend;\n        chart.tooltip = tooltip;\n        chart.options = nv.utils.optionsFunc.bind(chart);\n\n        chart._options = Object.create({}, {\n            // simple 
options, just get/set the necessary values\n            width: { get: function () { return width; }, set: function (_) { width = _; } },\n            height: { get: function () { return height; }, set: function (_) { height = _; } },\n            showLegend: { get: function () { return showLegend; }, set: function (_) { showLegend = _; } },\n            defaultState: { get: function () { return defaultState; }, set: function (_) { defaultState = _; } },\n            dimensionData: { get: function () { return dimensionData; }, set: function (_) { dimensionData = _; } },\n            displayBrush: { get: function () { return displayBrush; }, set: function (_) { displayBrush = _; } },\n            noData: { get: function () { return noData; }, set: function (_) { noData = _; } },\n            nanValue: { get: function () { return nanValue; }, set: function (_) { nanValue = _; } },\n\n            // options that require extra logic in the setter\n            margin: {\n                get: function () { return margin; },\n                set: function (_) {\n                    if (_.top !== undefined) {\n                        margin.top = _.top;\n                        marginTop = _.top;\n                    }\n                    margin.right = _.right !== undefined ? _.right : margin.right;\n                    margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n                    margin.left = _.left !== undefined ? _.left : margin.left;\n                }\n            },\n            color: {get: function(){return color;}, set: function(_){\n                    color = nv.utils.getColor(_);\n                    legend.color(color);\n                    parallelCoordinates.color(color);\n                }}\n        });\n\n        nv.utils.inheritOptions(chart, parallelCoordinates);\n        nv.utils.initOptions(chart);\n\n        return chart;\n    };\n","nv.models.pie = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 500\n        , height = 500\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , color = nv.utils.defaultColor()\n        , valueFormat = d3.format(',.2f')\n        , showLabels = true\n        , labelsOutside = false\n        , labelType = \"key\"\n        , labelThreshold = .02 //if slice percentage is under this, don't show label\n        , donut = false\n        , title = false\n        , growOnHover = true\n        , titleOffset = 0\n        , labelSunbeamLayout = false\n        , startAngle = false\n        , padAngle = false\n        , endAngle = false\n        , cornerRadius = 0\n        , donutRatio = 0.5\n        , duration = 250\n        , arcsRadius = []\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'elementMousemove', 'renderEnd')\n        ;\n\n    var arcs = [];\n    var arcsOver = [];\n\n    //============================================================\n    // chart function\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    function chart(selection) {\n        
renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right\n                , availableHeight = height - margin.top - margin.bottom\n                , radius = Math.min(availableWidth, availableHeight) / 2\n                , arcsRadiusOuter = []\n                , arcsRadiusInner = []\n                ;\n\n            container = d3.select(this)\n            if (arcsRadius.length === 0) {\n                var outer = radius - radius / 5;\n                var inner = donutRatio * radius;\n                for (var i = 0; i < data[0].length; i++) {\n                    arcsRadiusOuter.push(outer);\n                    arcsRadiusInner.push(inner);\n                }\n            } else {\n                if(growOnHover){\n                    arcsRadiusOuter = arcsRadius.map(function (d) { return (d.outer - d.outer / 5) * radius; });\n                    arcsRadiusInner = arcsRadius.map(function (d) { return (d.inner - d.inner / 5) * radius; });\n                    donutRatio = d3.min(arcsRadius.map(function (d) { return (d.inner - d.inner / 5); }));\n                } else {\n                    arcsRadiusOuter = arcsRadius.map(function (d) { return d.outer * radius; });\n                    arcsRadiusInner = arcsRadius.map(function (d) { return d.inner * radius; });\n                    donutRatio = d3.min(arcsRadius.map(function (d) { return d.inner; }));\n                }\n            }\n            nv.utils.initSVG(container);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('.nv-wrap.nv-pie').data(data);\n            var wrapEnter = wrap.enter().append('g').attr('class','nvd3 nv-wrap nv-pie nv-chart-' + id);\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n            var g_pie = gEnter.append('g').attr('class', 'nv-pie');\n            gEnter.append('g').attr('class', 'nv-pieLabels');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n            g.select('.nv-pie').attr('transform', 'translate(' + availableWidth / 2 + ',' + availableHeight / 2 + ')');\n            g.select('.nv-pieLabels').attr('transform', 'translate(' + availableWidth / 2 + ',' + availableHeight / 2 + ')');\n\n            //\n            container.on('click', function(d,i) {\n                dispatch.chartClick({\n                    data: d,\n                    index: i,\n                    pos: d3.event,\n                    id: id\n                });\n            });\n\n            arcs = [];\n            arcsOver = [];\n            for (var i = 0; i < data[0].length; i++) {\n\n                var arc = d3.svg.arc().outerRadius(arcsRadiusOuter[i]);\n                var arcOver = d3.svg.arc().outerRadius(arcsRadiusOuter[i] + 5);\n\n                if (startAngle !== false) {\n                    arc.startAngle(startAngle);\n                    arcOver.startAngle(startAngle);\n                }\n                if (endAngle !== false) {\n                    arc.endAngle(endAngle);\n                    arcOver.endAngle(endAngle);\n                }\n                if (donut) {\n                    arc.innerRadius(arcsRadiusInner[i]);\n                    arcOver.innerRadius(arcsRadiusInner[i]);\n                }\n\n                if (arc.cornerRadius && cornerRadius) {\n                    arc.cornerRadius(cornerRadius);\n                    arcOver.cornerRadius(cornerRadius);\n                }\n\n           
     arcs.push(arc);\n                arcsOver.push(arcOver);\n            }\n\n            // Setup the Pie chart and choose the data element\n            var pie = d3.layout.pie()\n                .sort(null)\n                .value(function(d) { return d.disabled ? 0 : getY(d) });\n\n            // padAngle added in d3 3.5\n            if (pie.padAngle && padAngle) {\n                pie.padAngle(padAngle);\n            }\n\n            // if title is specified and donut, put it in the middle\n            if (donut && title) {\n                g_pie.append(\"text\").attr('class', 'nv-pie-title');\n\n                wrap.select('.nv-pie-title')\n                    .style(\"text-anchor\", \"middle\")\n                    .text(function (d) {\n                        return title;\n                    })\n                    .style(\"font-size\", (Math.min(availableWidth, availableHeight)) * donutRatio * 2 / (title.length + 2) + \"px\")\n                    .attr(\"dy\", \"0.35em\") // trick to vertically center text\n                    .attr('transform', function(d, i) {\n                        return 'translate(0, '+ titleOffset + ')';\n                    });\n            }\n\n            var slices = wrap.select('.nv-pie').selectAll('.nv-slice').data(pie);\n            var pieLabels = wrap.select('.nv-pieLabels').selectAll('.nv-label').data(pie);\n\n            slices.exit().remove();\n            pieLabels.exit().remove();\n\n            var ae = slices.enter().append('g');\n            ae.attr('class', 'nv-slice');\n            ae.on('mouseover', function(d, i) {\n                d3.select(this).classed('hover', true);\n                if (growOnHover) {\n                    d3.select(this).select(\"path\").transition()\n                        .duration(70)\n                        .attr(\"d\", arcsOver[i]);\n                }\n                dispatch.elementMouseover({\n                    data: d.data,\n                    index: i,\n                    color: d3.select(this).style(\"fill\"),\n                    percent: (d.endAngle - d.startAngle) / (2 * Math.PI)\n                });\n            });\n            ae.on('mouseout', function(d, i) {\n                d3.select(this).classed('hover', false);\n                if (growOnHover) {\n                    d3.select(this).select(\"path\").transition()\n                        .duration(50)\n                        .attr(\"d\", arcs[i]);\n                }\n                dispatch.elementMouseout({data: d.data, index: i});\n            });\n            ae.on('mousemove', function(d, i) {\n                dispatch.elementMousemove({data: d.data, index: i});\n            });\n            ae.on('click', function(d, i) {\n                var element = this;\n                dispatch.elementClick({\n                    data: d.data,\n                    index: i,\n                    color: d3.select(this).style(\"fill\"),\n                    event: d3.event,\n                    element: element\n                });\n            });\n            ae.on('dblclick', function(d, i) {\n                dispatch.elementDblClick({\n                    data: d.data,\n                    index: i,\n                    color: d3.select(this).style(\"fill\")\n                });\n            });\n\n            slices.attr('fill', function(d,i) { return color(d.data, i); });\n            slices.attr('stroke', function(d,i) { return color(d.data, i); });\n\n            var paths = ae.append('path').each(function(d) {\n                
this._current = d;\n            });\n\n            slices.select('path')\n                .transition()\n                .duration(duration)\n                .attr('d', function (d, i) { return arcs[i](d); })\n                .attrTween('d', arcTween);\n\n            if (showLabels) {\n                // This does the normal label\n                var labelsArc = [];\n                for (var i = 0; i < data[0].length; i++) {\n                    labelsArc.push(arcs[i]);\n\n                    if (labelsOutside) {\n                        if (donut) {\n                            labelsArc[i] = d3.svg.arc().outerRadius(arcs[i].outerRadius());\n                            if (startAngle !== false) labelsArc[i].startAngle(startAngle);\n                            if (endAngle !== false) labelsArc[i].endAngle(endAngle);\n                        }\n                    } else if (!donut) {\n                            labelsArc[i].innerRadius(0);\n                    }\n                }\n\n                pieLabels.enter().append(\"g\").classed(\"nv-label\",true).each(function(d,i) {\n                    var group = d3.select(this);\n\n                    group.attr('transform', function (d, i) {\n                        if (labelSunbeamLayout) {\n                            d.outerRadius = arcsRadiusOuter[i] + 10; // Set Outer Coordinate\n                            d.innerRadius = arcsRadiusOuter[i] + 15; // Set Inner Coordinate\n                            var rotateAngle = (d.startAngle + d.endAngle) / 2 * (180 / Math.PI);\n                            if ((d.startAngle + d.endAngle) / 2 < Math.PI) {\n                                rotateAngle -= 90;\n                            } else {\n                                rotateAngle += 90;\n                            }\n                            return 'translate(' + labelsArc[i].centroid(d) + ') rotate(' + rotateAngle + ')';\n                        } else {\n                            d.outerRadius = radius + 10; // Set Outer Coordinate\n                            d.innerRadius = radius + 15; // Set Inner Coordinate\n                            return 'translate(' + labelsArc[i].centroid(d) + ')'\n                        }\n                    });\n\n                    group.append('rect')\n                        .style('stroke', '#fff')\n                        .style('fill', '#fff')\n                        .attr(\"rx\", 3)\n                        .attr(\"ry\", 3);\n\n                    group.append('text')\n                        .style('text-anchor', labelSunbeamLayout ? ((d.startAngle + d.endAngle) / 2 < Math.PI ? 
'start' : 'end') : 'middle') //center the text on it's origin or begin/end if orthogonal aligned\n                        .style('fill', '#000')\n                });\n\n                var labelLocationHash = {};\n                var avgHeight = 14;\n                var avgWidth = 140;\n                var createHashKey = function(coordinates) {\n                    return Math.floor(coordinates[0]/avgWidth) * avgWidth + ',' + Math.floor(coordinates[1]/avgHeight) * avgHeight;\n                };\n                var getSlicePercentage = function(d) {\n                    return (d.endAngle - d.startAngle) / (2 * Math.PI);\n                };\n\n                pieLabels.watchTransition(renderWatch, 'pie labels').attr('transform', function (d, i) {\n                    if (labelSunbeamLayout) {\n                        d.outerRadius = arcsRadiusOuter[i] + 10; // Set Outer Coordinate\n                        d.innerRadius = arcsRadiusOuter[i] + 15; // Set Inner Coordinate\n                        var rotateAngle = (d.startAngle + d.endAngle) / 2 * (180 / Math.PI);\n                        if ((d.startAngle + d.endAngle) / 2 < Math.PI) {\n                            rotateAngle -= 90;\n                        } else {\n                            rotateAngle += 90;\n                        }\n                        return 'translate(' + labelsArc[i].centroid(d) + ') rotate(' + rotateAngle + ')';\n                    } else {\n                        d.outerRadius = radius + 10; // Set Outer Coordinate\n                        d.innerRadius = radius + 15; // Set Inner Coordinate\n\n                        /*\n                        Overlapping pie labels are not good. What this attempts to do is, prevent overlapping.\n                        Each label location is hashed, and if a hash collision occurs, we assume an overlap.\n                        Adjust the label's y-position to remove the overlap.\n                        */\n                        var center = labelsArc[i].centroid(d);\n                        var percent = getSlicePercentage(d);\n                        if (d.value && percent >= labelThreshold) {\n                            var hashKey = createHashKey(center);\n                            if (labelLocationHash[hashKey]) {\n                                center[1] -= avgHeight;\n                            }\n                            labelLocationHash[createHashKey(center)] = true;\n                        }\n                        return 'translate(' + center + ')'\n                    }\n                });\n\n                pieLabels.select(\".nv-label text\")\n                    .style('text-anchor', function(d,i) {\n                        //center the text on it's origin or begin/end if orthogonal aligned\n                        return labelSunbeamLayout ? ((d.startAngle + d.endAngle) / 2 < Math.PI ? 
'start' : 'end') : 'middle';\n                    })\n                    .text(function(d, i) {\n                        var percent = getSlicePercentage(d);\n                        var label = '';\n                        if (!d.value || percent < labelThreshold) return '';\n\n                        if(typeof labelType === 'function') {\n                            label = labelType(d, i, {\n                                'key': getX(d.data),\n                                'value': getY(d.data),\n                                'percent': valueFormat(percent)\n                            });\n                        } else {\n                            switch (labelType) {\n                                case 'key':\n                                    label = getX(d.data);\n                                    break;\n                                case 'value':\n                                    label = valueFormat(getY(d.data));\n                                    break;\n                                case 'percent':\n                                    label = d3.format('%')(percent);\n                                    break;\n                            }\n                        }\n                        return label;\n                    })\n                ;\n            }\n\n\n            // Computes the angle of an arc, converting from radians to degrees.\n            function angle(d) {\n                var a = (d.startAngle + d.endAngle) * 90 / Math.PI - 90;\n                return a > 90 ? a - 180 : a;\n            }\n\n            function arcTween(a, idx) {\n                a.endAngle = isNaN(a.endAngle) ? 0 : a.endAngle;\n                a.startAngle = isNaN(a.startAngle) ? 0 : a.startAngle;\n                if (!donut) a.innerRadius = 0;\n                var i = d3.interpolate(this._current, a);\n                this._current = i(0);\n                return function (t) {\n                    return arcs[idx](i(t));\n                };\n            }\n        });\n\n        renderWatch.renderEnd('pie immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        arcsRadius: { get: function () { return arcsRadius; }, set: function (_) { arcsRadius = _; } },\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLabels: {get: function(){return showLabels;}, set: function(_){showLabels=_;}},\n        title:      {get: function(){return title;}, set: function(_){title=_;}},\n        titleOffset:    {get: function(){return titleOffset;}, set: function(_){titleOffset=_;}},\n        labelThreshold: {get: function(){return labelThreshold;}, set: function(_){labelThreshold=_;}},\n        valueFormat:    {get: function(){return valueFormat;}, set: function(_){valueFormat=_;}},\n        x:          {get: function(){return getX;}, set: function(_){getX=_;}},\n        id:         {get: function(){return id;}, set: function(_){id=_;}},\n        endAngle:   {get: function(){return endAngle;}, set: function(_){endAngle=_;}},\n        startAngle: {get: function(){return startAngle;}, set: 
function(_){startAngle=_;}},\n        padAngle:   {get: function(){return padAngle;}, set: function(_){padAngle=_;}},\n        cornerRadius: {get: function(){return cornerRadius;}, set: function(_){cornerRadius=_;}},\n        donutRatio:   {get: function(){return donutRatio;}, set: function(_){donutRatio=_;}},\n        labelsOutside: {get: function(){return labelsOutside;}, set: function(_){labelsOutside=_;}},\n        labelSunbeamLayout: {get: function(){return labelSunbeamLayout;}, set: function(_){labelSunbeamLayout=_;}},\n        donut:              {get: function(){return donut;}, set: function(_){donut=_;}},\n        growOnHover:        {get: function(){return growOnHover;}, set: function(_){growOnHover=_;}},\n\n        // depreciated after 1.7.1\n        pieLabelsOutside: {get: function(){return labelsOutside;}, set: function(_){\n            labelsOutside=_;\n            nv.deprecated('pieLabelsOutside', 'use labelsOutside instead');\n        }},\n        // depreciated after 1.7.1\n        donutLabelsOutside: {get: function(){return labelsOutside;}, set: function(_){\n            labelsOutside=_;\n            nv.deprecated('donutLabelsOutside', 'use labelsOutside instead');\n        }},\n        // deprecated after 1.7.1\n        labelFormat: {get: function(){ return valueFormat;}, set: function(_) {\n            valueFormat=_;\n            nv.deprecated('labelFormat','use valueFormat instead');\n        }},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = typeof _.top    != 'undefined' ? _.top    : margin.top;\n            margin.right  = typeof _.right  != 'undefined' ? _.right  : margin.right;\n            margin.bottom = typeof _.bottom != 'undefined' ? _.bottom : margin.bottom;\n            margin.left   = typeof _.left   != 'undefined' ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }},\n        y: {get: function(){return getY;}, set: function(_){\n            getY=d3.functor(_);\n        }},\n        color: {get: function(){return color;}, set: function(_){\n            color=nv.utils.getColor(_);\n        }},\n        labelType:          {get: function(){return labelType;}, set: function(_){\n            labelType= _ || 'key';\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","nv.models.pieChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var pie = nv.models.pie();\n    var legend = nv.models.legend();\n    var tooltip = nv.models.tooltip();\n\n    var margin = {top: 30, right: 20, bottom: 20, left: 20}\n        , marginTop = null\n        , width = null\n        , height = null\n        , showTooltipPercent = false\n        , showLegend = true\n        , legendPosition = \"top\"\n        , color = nv.utils.defaultColor()\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , duration = 250\n        , dispatch = d3.dispatch('stateChange', 'changeState','renderEnd')\n        ;\n\n    tooltip\n        .duration(0)\n        .headerEnabled(false)\n        .valueFormatter(function(d, i) {\n            return pie.valueFormat()(d, i);\n        });\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled })\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.active !== undefined) {\n                data.forEach(function (series, i) {\n                    series.disabled = !state.active[i];\n                });\n            }\n        }\n    };\n\n    //============================================================\n    // Chart function\n    //------------------------------------------------------------\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(pie);\n\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            var that = this;\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() { container.transition().call(chart); };\n            chart.container = this;\n\n            state.setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            //set state.disabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = 
state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length) {\n                nv.utils.noData(chart, container);\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-pieChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-pieChart').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-pieWrap');\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                if (legendPosition === \"top\") {\n                    legend.width( availableWidth ).key(pie.x());\n\n                    wrap.select('.nv-legendWrap')\n                        .datum(data)\n                        .call(legend);\n\n                    if (!marginTop && legend.height() !== margin.top) {\n                        margin.top = legend.height();\n                        availableHeight = nv.utils.availableHeight(height, container, margin);\n                    }\n\n                    wrap.select('.nv-legendWrap')\n                        .attr('transform', 'translate(0,' + (-margin.top) +')');\n                } else if (legendPosition === \"right\") {\n                    var legendWidth = nv.models.legend().width();\n                    if (availableWidth / 2 < legendWidth) {\n                        legendWidth = (availableWidth / 2)\n                    }\n                    legend.height(availableHeight).key(pie.x());\n                    legend.width(legendWidth);\n                    availableWidth -= legend.width();\n\n                    wrap.select('.nv-legendWrap')\n                        .datum(data)\n                        .call(legend)\n                        .attr('transform', 'translate(' + (availableWidth) +',0)');\n                }\n            }\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Main Chart Component(s)\n            pie.width(availableWidth).height(availableHeight);\n            var pieWrap = g.select('.nv-pieWrap').datum([data]);\n            d3.transition(pieWrap).call(pie);\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState) {\n                    state[key] = newState[key];\n                }\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n                    state.disabled = e.disabled;\n                }\n                chart.update();\n           
 });\n        });\n\n        renderWatch.renderEnd('pieChart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    pie.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt['series'] = {\n            key: chart.x()(evt.data),\n            value: chart.y()(evt.data),\n            color: evt.color,\n            percent: evt.percent\n        };\n        if (!showTooltipPercent) {\n            delete evt.percent;\n            delete evt.series.percent;\n        }\n        tooltip.data(evt).hidden(false);\n    });\n\n    pie.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    pie.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.legend = legend;\n    chart.dispatch = dispatch;\n    chart.pie = pie;\n    chart.tooltip = tooltip;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    // use Object get/set functionality to map between vars and chart functions\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:              {get: function(){return width;},                set: function(_){width=_;}},\n        height:             {get: function(){return height;},               set: function(_){height=_;}},\n        noData:             {get: function(){return noData;},               set: function(_){noData=_;}},\n        showTooltipPercent: {get: function(){return showTooltipPercent;},   set: function(_){showTooltipPercent=_;}},\n        showLegend:         {get: function(){return showLegend;},           set: function(_){showLegend=_;}},\n        legendPosition:     {get: function(){return legendPosition;},       set: function(_){legendPosition=_;}},\n        defaultState:       {get: function(){return defaultState;},         set: function(_){defaultState=_;}},\n\n        // options that require extra logic in the setter\n        color: {get: function(){return color;}, set: function(_){\n            color = _;\n            legend.color(color);\n            pie.color(color);\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            pie.duration(duration);\n        }},\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }}\n    });\n    nv.utils.inheritOptions(chart, pie);\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","nv.models.sankey = function() {\n    'use strict';\n\n    // Sources:\n    // - https://bost.ocks.org/mike/sankey/\n    // - https://github.com/soxofaan/d3-plugin-captain-sankey\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var sankey = {},\n        nodeWidth = 24,\n        nodePadding = 8,\n        size = [1, 1],\n        nodes = [],\n        links = [],\n        sinksRight = true;\n\n    var layout = function(iterations) {\n        computeNodeLinks();\n        computeNodeValues();\n        computeNodeBreadths();\n        computeNodeDepths(iterations);\n    };\n\n    var relayout = function() {\n        computeLinkDepths();\n    };\n\n    // SVG path data generator, to be used as 'd' attribute on 'path' element selection.\n    var link = function() {\n        var curvature = .5;\n\n        function link(d) {\n\n            var x0 = d.source.x + d.source.dx,\n                x1 = d.target.x,\n                xi = d3.interpolateNumber(x0, x1),\n                x2 = xi(curvature),\n                x3 = xi(1 - curvature),\n                y0 = d.source.y + d.sy + d.dy / 2,\n                y1 = d.target.y + d.ty + d.dy / 2;\n            var linkPath = 'M' + x0 + ',' + y0\n                + 'C' + x2 + ',' + y0\n                + ' ' + x3 + ',' + y1\n                + ' ' + x1 + ',' + y1;\n            return linkPath;\n        }\n\n        link.curvature = function(_) {\n            if (!arguments.length) return curvature;\n            curvature = +_;\n            return link;\n        };\n\n        return link;\n    };\n\n    // Y-position of the middle of a node.\n    var center = function(node) {\n        return node.y + node.dy / 2;\n    };\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    // Populate the sourceLinks and targetLinks for each node.\n    // Also, if the source and target are not objects, assume they are indices.\n    function computeNodeLinks() {\n        nodes.forEach(function(node) {\n            // Links that have this node as source.\n            node.sourceLinks = [];\n            // Links that have this node as target.\n            node.targetLinks = [];\n        });\n        links.forEach(function(link) {\n            var source = link.source,\n                target = link.target;\n            if (typeof source === 'number') source = link.source = nodes[link.source];\n            if (typeof target === 'number') target = link.target = nodes[link.target];\n            source.sourceLinks.push(link);\n            target.targetLinks.push(link);\n        });\n    }\n\n    // Compute the value (size) of each node by summing the associated links.\n    function computeNodeValues() {\n        nodes.forEach(function(node) {\n            node.value = Math.max(\n                d3.sum(node.sourceLinks, value),\n                d3.sum(node.targetLinks, value)\n            );\n        });\n    }\n\n    // Iteratively assign the breadth (x-position) for each node.\n    // Nodes are assigned the maximum breadth of incoming neighbors plus one;\n    // nodes with no incoming links are assigned breadth zero, while\n    // nodes with no outgoing links are assigned the maximum 
breadth.\n    function computeNodeBreadths() {\n        //\n        var remainingNodes = nodes,\n            nextNodes,\n            x = 0;\n\n        // Work from left to right.\n        // Keep updating the breath (x-position) of nodes that are target of recently updated nodes.\n        //\n        while (remainingNodes.length && x < nodes.length) {\n            nextNodes = [];\n            remainingNodes.forEach(function(node) {\n                node.x = x;\n                node.dx = nodeWidth;\n                node.sourceLinks.forEach(function(link) {\n                    if (nextNodes.indexOf(link.target) < 0) {\n                        nextNodes.push(link.target);\n                    }\n                });\n            });\n            remainingNodes = nextNodes;\n            ++x;\n            //\n        }\n\n        // Optionally move pure sinks always to the right.\n        if (sinksRight) {\n            moveSinksRight(x);\n        }\n\n        scaleNodeBreadths((size[0] - nodeWidth) / (x - 1));\n    }\n\n    function moveSourcesRight() {\n        nodes.forEach(function(node) {\n            if (!node.targetLinks.length) {\n                node.x = d3.min(node.sourceLinks, function(d) { return d.target.x; }) - 1;\n            }\n        });\n    }\n\n    function moveSinksRight(x) {\n        nodes.forEach(function(node) {\n            if (!node.sourceLinks.length) {\n                node.x = x - 1;\n            }\n        });\n    }\n\n    function scaleNodeBreadths(kx) {\n        nodes.forEach(function(node) {\n            node.x *= kx;\n        });\n    }\n\n    // Compute the depth (y-position) for each node.\n    function computeNodeDepths(iterations) {\n        // Group nodes by breath.\n        var nodesByBreadth = d3.nest()\n            .key(function(d) { return d.x; })\n            .sortKeys(d3.ascending)\n            .entries(nodes)\n            .map(function(d) { return d.values; });\n\n        //\n        initializeNodeDepth();\n        resolveCollisions();\n        computeLinkDepths();\n        for (var alpha = 1; iterations > 0; --iterations) {\n            relaxRightToLeft(alpha *= .99);\n            resolveCollisions();\n            computeLinkDepths();\n            relaxLeftToRight(alpha);\n            resolveCollisions();\n            computeLinkDepths();\n        }\n\n        function initializeNodeDepth() {\n            // Calculate vertical scaling factor.\n            var ky = d3.min(nodesByBreadth, function(nodes) {\n                return (size[1] - (nodes.length - 1) * nodePadding) / d3.sum(nodes, value);\n            });\n\n            nodesByBreadth.forEach(function(nodes) {\n                nodes.forEach(function(node, i) {\n                    node.y = i;\n                    node.dy = node.value * ky;\n                });\n            });\n\n            links.forEach(function(link) {\n                link.dy = link.value * ky;\n            });\n        }\n\n        function relaxLeftToRight(alpha) {\n            nodesByBreadth.forEach(function(nodes, breadth) {\n                nodes.forEach(function(node) {\n                    if (node.targetLinks.length) {\n                        // Value-weighted average of the y-position of source node centers linked to this node.\n                        var y = d3.sum(node.targetLinks, weightedSource) / d3.sum(node.targetLinks, value);\n                        node.y += (y - center(node)) * alpha;\n                    }\n                });\n            });\n\n            function weightedSource(link) {\n       
         return (link.source.y + link.sy + link.dy / 2) * link.value;\n            }\n        }\n\n        function relaxRightToLeft(alpha) {\n            nodesByBreadth.slice().reverse().forEach(function(nodes) {\n                nodes.forEach(function(node) {\n                    if (node.sourceLinks.length) {\n                        // Value-weighted average of the y-positions of target nodes linked to this node.\n                        var y = d3.sum(node.sourceLinks, weightedTarget) / d3.sum(node.sourceLinks, value);\n                        node.y += (y - center(node)) * alpha;\n                    }\n                });\n            });\n\n            function weightedTarget(link) {\n                return (link.target.y + link.ty + link.dy / 2) * link.value;\n            }\n        }\n\n        function resolveCollisions() {\n            nodesByBreadth.forEach(function(nodes) {\n                var node,\n                    dy,\n                    y0 = 0,\n                    n = nodes.length,\n                    i;\n\n                // Push any overlapping nodes down.\n                nodes.sort(ascendingDepth);\n                for (i = 0; i < n; ++i) {\n                    node = nodes[i];\n                    dy = y0 - node.y;\n                    if (dy > 0) node.y += dy;\n                    y0 = node.y + node.dy + nodePadding;\n                }\n\n                // If the bottommost node goes outside the bounds, push it back up.\n                dy = y0 - nodePadding - size[1];\n                if (dy > 0) {\n                    y0 = node.y -= dy;\n\n                    // Push any overlapping nodes back up.\n                    for (i = n - 2; i >= 0; --i) {\n                        node = nodes[i];\n                        dy = node.y + node.dy + nodePadding - y0;\n                        if (dy > 0) node.y -= dy;\n                        y0 = node.y;\n                    }\n                }\n            });\n        }\n\n        function ascendingDepth(a, b) {\n            return a.y - b.y;\n        }\n    }\n\n    // Compute y-offset of the source endpoint (sy) and target endpoints (ty) of links,\n    // relative to the source/target node's y-position.\n    function computeLinkDepths() {\n        nodes.forEach(function(node) {\n            node.sourceLinks.sort(ascendingTargetDepth);\n            node.targetLinks.sort(ascendingSourceDepth);\n        });\n        nodes.forEach(function(node) {\n            var sy = 0, ty = 0;\n            node.sourceLinks.forEach(function(link) {\n                link.sy = sy;\n                sy += link.dy;\n            });\n            node.targetLinks.forEach(function(link) {\n                link.ty = ty;\n                ty += link.dy;\n            });\n        });\n\n        function ascendingSourceDepth(a, b) {\n            return a.source.y - b.source.y;\n        }\n\n        function ascendingTargetDepth(a, b) {\n            return a.target.y - b.target.y;\n        }\n    }\n\n    // Value property accessor.\n    function value(x) {\n        return x.value;\n    }\n\n    sankey.options = nv.utils.optionsFunc.bind(sankey);\n    sankey._options = Object.create({}, {\n        nodeWidth:    {get: function(){return nodeWidth;},   set: function(_){nodeWidth=+_;}},\n        nodePadding:  {get: function(){return nodePadding;}, set: function(_){nodePadding=_;}},\n        nodes:        {get: function(){return nodes;},       set: function(_){nodes=_;}},\n        links:        {get: function(){return links ;},      set: 
function(_){links=_;}},\n        size:         {get: function(){return size;},        set: function(_){size=_;}},\n        sinksRight:   {get: function(){return sinksRight;},  set: function(_){sinksRight=_;}},\n\n        layout:       {get: function(){layout(32);},         set: function(_){layout(_);}},\n        relayout:     {get: function(){relayout();},         set: function(_){}},\n        center:       {get: function(){return center();},    set: function(_){\n            if(typeof _ === 'function'){\n                center=_;\n            }\n        }},\n        link:         {get: function(){return link();},      set: function(_){\n            if(typeof _ === 'function'){\n                link=_;\n            }\n            return link();\n        }}\n    });\n\n    nv.utils.initOptions(sankey);\n\n    return sankey;\n};\n","nv.models.sankeyChart = function() {\n    \"use strict\";\n\n    // Sources:\n    // - https://bost.ocks.org/mike/sankey/\n    // - https://github.com/soxofaan/d3-plugin-captain-sankey\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 5, right: 0, bottom: 5, left: 0}\n        , sankey = nv.models.sankey()\n        , width = 600\n        , height = 400\n        , nodeWidth = 36\n        , nodePadding =  40\n        , units = 'units'\n        , center = undefined\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var formatNumber = d3.format(',.0f');    // zero decimal places\n    var format = function(d) {\n        return formatNumber(d) + ' ' + units;\n    };\n    var color = d3.scale.category20();\n    var linkTitle = function(d){\n        return d.source.name + ' → ' + d.target.name + '\\n' + format(d.value);\n    };\n    var nodeFillColor = function(d){\n        return d.color = color(d.name.replace(/ .*/, ''));\n    };\n    var nodeStrokeColor = function(d){\n        return d3.rgb(d.color).darker(2);\n    };\n    var nodeTitle = function(d){\n        return d.name + '\\n' + format(d.value);\n    };\n\n    var showError = function(element, message) {\n        element.append('text')\n            .attr('x', 0)\n            .attr('y', 0)\n            .attr('class', 'nvd3-sankey-chart-error')\n            .attr('text-anchor', 'middle')\n            .text(message);\n    };\n\n    function chart(selection) {\n        selection.each(function(data) {\n\n            var testData = {\n                nodes:\n                    [\n                        {'node': 1, 'name': 'Test 1'},\n                        {'node': 2, 'name': 'Test 2'},\n                        {'node': 3, 'name': 'Test 3'},\n                        {'node': 4, 'name': 'Test 4'},\n                        {'node': 5, 'name': 'Test 5'},\n                        {'node': 6, 'name': 'Test 6'}\n                    ],\n                links:\n                    [\n                        {'source': 0, 'target': 1, 'value': 2295},\n                        {'source': 0, 'target': 5, 'value': 1199},\n                        {'source': 1, 'target': 2, 'value': 1119},\n                        {'source': 1, 'target': 5, 'value': 1176},\n                        {'source': 2, 'target': 3, 'value': 487},\n                        {'source': 2, 'target': 5, 'value': 632},\n                        {'source': 3, 'target': 4, 
'value': 301},\n                        {'source': 3, 'target': 5, 'value': 186}\n                    ]\n            };\n\n            // Error handling\n            var isDataValid = false;\n            var dataAvailable = false;\n\n            // check if data is valid\n            if(\n                (typeof data['nodes'] === 'object' && data['nodes'].length) >= 0 &&\n                (typeof data['links'] === 'object' && data['links'].length) >= 0\n            ){\n                isDataValid = true;\n            }\n\n            // check if data is available\n            if(\n                data['nodes'] && data['nodes'].length > 0 &&\n                data['links'] && data['links'].length > 0\n            ) {\n                dataAvailable = true;\n            }\n\n            // show error\n            if(!isDataValid) {\n                console.error('NVD3 Sankey chart error:', 'invalid data format for', data);\n                console.info('Valid data format is: ', testData, JSON.stringify(testData));\n                showError(selection, 'Error loading chart, data is invalid');\n                return false;\n            }\n\n            // TODO use nv.utils.noData\n            if(!dataAvailable) {\n                showError(selection, 'No data available');\n                return false;\n            }\n\n            // No errors, continue\n\n            // append the svg canvas to the page\n            var svg = selection.append('svg')\n                .attr('width', width)\n                .attr('height', height)\n                .append('g')\n                .attr('class', 'nvd3 nv-wrap nv-sankeyChart');\n\n            // Set the sankey diagram properties\n            sankey\n                .nodeWidth(nodeWidth)\n                .nodePadding(nodePadding)\n                .size([width, height]);\n\n            var path = sankey.link();\n\n            sankey\n                .nodes(data.nodes)\n                .links(data.links)\n                .layout(32)\n                .center(center);\n\n            // add in the links\n            var link = svg.append('g').selectAll('.link')\n                .data(data.links)\n                .enter().append('path')\n                .attr('class', 'link')\n                .attr('d', path)\n                .style('stroke-width', function(d) { return Math.max(1, d.dy); })\n            .sort(function(a,b) { return b.dy - a.dy; });\n\n            // add the link titles\n            link.append('title')\n                .text(linkTitle);\n\n            // add in the nodes\n            var node = svg.append('g').selectAll('.node')\n                .data(data.nodes)\n                .enter().append('g')\n                .attr('class', 'node')\n                .attr('transform', function(d) { return 'translate(' + d.x + ',' + d.y + ')'; })\n                .call(\n                    d3.behavior\n                        .drag()\n                        .origin(function(d) { return d; })\n                        .on('dragstart', function() {\n                            this.parentNode.appendChild(this);\n                        })\n                        .on('drag', dragmove)\n                );\n\n            // add the rectangles for the nodes\n            node.append('rect')\n                .attr('height', function(d) { return d.dy; })\n                .attr('width', sankey.nodeWidth())\n                .style('fill', nodeFillColor)\n                .style('stroke', nodeStrokeColor)\n                .append('title')\n                
.text(nodeTitle);\n\n            // add in the title for the nodes\n            node.append('text')\n                .attr('x', -6)\n                .attr('y', function(d) { return d.dy / 2; })\n                .attr('dy', '.35em')\n                .attr('text-anchor', 'end')\n                .attr('transform', null)\n                .text(function(d) { return d.name; })\n                .filter(function(d) { return d.x < width / 2; })\n                .attr('x', 6 + sankey.nodeWidth())\n                .attr('text-anchor', 'start');\n\n            // the function for moving the nodes\n            function dragmove(d) {\n                d3.select(this).attr('transform',\n                'translate(' + d.x + ',' + (\n                    d.y = Math.max(0, Math.min(height - d.dy, d3.event.y))\n                ) + ')');\n                sankey.relayout();\n                link.attr('d', path);\n            }\n        });\n\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        units:           {get: function(){return units;},       set: function(_){units=_;}},\n        width:           {get: function(){return width;},       set: function(_){width=_;}},\n        height:          {get: function(){return height;},      set: function(_){height=_;}},\n        format:          {get: function(){return format;},      set: function(_){format=_;}},\n        linkTitle:       {get: function(){return linkTitle;},   set: function(_){linkTitle=_;}},\n        nodeWidth:       {get: function(){return nodeWidth;},   set: function(_){nodeWidth=_;}},\n        nodePadding:     {get: function(){return nodePadding;}, set: function(_){nodePadding=_;}},\n        center:          {get: function(){return center},       set: function(_){center=_}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        nodeStyle: {get: function(){return {};}, set: function(_){\n            nodeFillColor   = _.fillColor   !== undefined ? _.fillColor   : nodeFillColor;\n            nodeStrokeColor = _.strokeColor !== undefined ? _.strokeColor : nodeStrokeColor;\n            nodeTitle       = _.title       !== undefined ? 
_.title       : nodeTitle;\n        }}\n\n    });\n\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.scatter = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin       = {top: 0, right: 0, bottom: 0, left: 0}\n        , width        = null\n        , height       = null\n        , color        = nv.utils.defaultColor() // chooses color\n        , pointBorderColor = null\n        , id           = Math.floor(Math.random() * 100000) //Create semi-unique ID incase user doesn't select one\n        , container    = null\n        , x            = d3.scale.linear()\n        , y            = d3.scale.linear()\n        , z            = d3.scale.linear() //linear because d3.svg.shape.size is treated as area\n        , getX         = function(d) { return d.x } // accessor to get the x value\n        , getY         = function(d) { return d.y } // accessor to get the y value\n        , getSize      = function(d) { return d.size || 1} // accessor to get the point size\n        , getShape     = function(d) { return d.shape || 'circle' } // accessor to get point shape\n        , forceX       = [] // List of numbers to Force into the X scale (ie. 0, or a max / min, etc.)\n        , forceY       = [] // List of numbers to Force into the Y scale\n        , forceSize    = [] // List of numbers to Force into the Size scale\n        , interactive  = true // If true, plots a voronoi overlay for advanced point intersection\n        , pointActive  = function(d) { return !d.notActive } // any points that return false will be filtered out\n        , padData      = false // If true, adds half a data points width to front and back, for lining up a line chart with a bar chart\n        , padDataOuter = .1 //outerPadding to imitate ordinal scale outer padding\n        , clipEdge     = false // if true, masks points within x and y scale\n        , clipVoronoi  = true // if true, masks each point with a circle... 
can turn off to slightly increase performance\n        , showVoronoi  = false // display the voronoi areas\n        , clipRadius   = function() { return 25 } // function to get the radius for voronoi point clips\n        , xDomain      = null // Override x domain (skips the calculation from data)\n        , yDomain      = null // Override y domain\n        , xRange       = null // Override x range\n        , yRange       = null // Override y range\n        , sizeDomain   = null // Override point size domain\n        , sizeRange    = null\n        , singlePoint  = false\n        , dispatch     = d3.dispatch('elementClick', 'elementDblClick', 'elementMouseover', 'elementMouseout', 'renderEnd')\n        , useVoronoi   = true\n        , duration     = 250\n        , interactiveUpdateDelay = 300\n        , showLabels    = false\n        ;\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0, z0 // used to store previous scales\n        , width0\n        , height0\n        , timeoutID\n        , needsUpdate = false // Flag for when the points are visually updating, but the interactive layer is behind, to disable tooltips\n        , renderWatch = nv.utils.renderWatch(dispatch, duration)\n        , _sizeRange_def = [16, 256]\n        , _cache = {}\n        ;\n\n    function getCache(d) {\n        var key, val;\n        key = d[0].series + ':' + d[1];\n        val = _cache[key] = _cache[key] || {};\n        return val;\n    }\n\n    function delCache(d) {\n        var key, val;\n        key = d[0].series + ':' + d[1];\n        delete _cache[key];\n    }\n\n    function getDiffs(d) {\n        var i, key, val,\n            cache = getCache(d),\n            diffs = false;\n        for (i = 1; i < arguments.length; i += 2) {\n            key = arguments[i];\n            val = arguments[i + 1](d[0], d[1]);\n            if (cache[key] !== val || !cache.hasOwnProperty(key)) {\n                cache[key] = val;\n                diffs = true;\n            }\n        }\n        return diffs;\n    }\n\n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            container = d3.select(this);\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            nv.utils.initSVG(container);\n\n            //add series index to each data point for reference\n            data.forEach(function(series, i) {\n                series.values.forEach(function(point) {\n                    point.series = i;\n                });\n            });\n\n            // Setup Scales\n            var logScale = chart.yScale().name === d3.scale.log().name ? true : false;\n            // remap and flatten the data for use in calculating the scales' domains\n            var seriesData = (xDomain && yDomain && sizeDomain) ? [] : // if we know xDomain and yDomain and sizeDomain, no need to calculate.... 
if Size is constant remember to set sizeDomain to speed up performance\n                d3.merge(\n                    data.map(function(d) {\n                        return d.values.map(function(d,i) {\n                            return { x: getX(d,i), y: getY(d,i), size: getSize(d,i) }\n                        })\n                    })\n                );\n\n            x   .domain(xDomain || d3.extent(seriesData.map(function(d) { return d.x; }).concat(forceX)))\n\n            if (padData && data[0])\n                x.range(xRange || [(availableWidth * padDataOuter +  availableWidth) / (2 *data[0].values.length), availableWidth - availableWidth * (1 + padDataOuter) / (2 * data[0].values.length)  ]);\n            //x.range([availableWidth * .5 / data[0].values.length, availableWidth * (data[0].values.length - .5)  / data[0].values.length ]);\n            else\n                x.range(xRange || [0, availableWidth]);\n\n             if (logScale) {\n                    var min = d3.min(seriesData.map(function(d) { if (d.y !== 0) return d.y; }));\n                    y.clamp(true)\n                        .domain(yDomain || d3.extent(seriesData.map(function(d) {\n                            if (d.y !== 0) return d.y;\n                            else return min * 0.1;\n                        }).concat(forceY)))\n                        .range(yRange || [availableHeight, 0]);\n                } else {\n                        y.domain(yDomain || d3.extent(seriesData.map(function (d) { return d.y;}).concat(forceY)))\n                        .range(yRange || [availableHeight, 0]);\n                }\n\n            z   .domain(sizeDomain || d3.extent(seriesData.map(function(d) { return d.size }).concat(forceSize)))\n                .range(sizeRange || _sizeRange_def);\n\n            // If scale's domain don't have a range, slightly adjust to make one... 
so a chart can show a single data point\n            singlePoint = x.domain()[0] === x.domain()[1] || y.domain()[0] === y.domain()[1];\n\n            if (x.domain()[0] === x.domain()[1])\n                x.domain()[0] ?\n                    x.domain([x.domain()[0] - x.domain()[0] * 0.01, x.domain()[1] + x.domain()[1] * 0.01])\n                    : x.domain([-1,1]);\n\n            if (y.domain()[0] === y.domain()[1])\n                y.domain()[0] ?\n                    y.domain([y.domain()[0] - y.domain()[0] * 0.01, y.domain()[1] + y.domain()[1] * 0.01])\n                    : y.domain([-1,1]);\n\n            if ( isNaN(x.domain()[0])) {\n                x.domain([-1,1]);\n            }\n\n            if ( isNaN(y.domain()[0])) {\n                y.domain([-1,1]);\n            }\n\n            x0 = x0 || x;\n            y0 = y0 || y;\n            z0 = z0 || z;\n\n            var scaleDiff = x(1) !== x0(1) || y(1) !== y0(1) || z(1) !== z0(1);\n\n            width0 = width0 || width;\n            height0 = height0 || height;\n\n            var sizeDiff = width0 !== width || height0 !== height;\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-scatter').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-scatter nv-chart-' + id);\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            wrap.classed('nv-single-point', singlePoint);\n            gEnter.append('g').attr('class', 'nv-groups');\n            gEnter.append('g').attr('class', 'nv-point-paths');\n            wrapEnter.append('g').attr('class', 'nv-point-clips');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-edge-clip-' + id)\n                .append('rect')\n                .attr('transform', 'translate( -10, -10)');\n                \n            wrap.select('#nv-edge-clip-' + id + ' rect')\n                .attr('width', availableWidth + 20)\n                .attr('height', (availableHeight > 0) ? availableHeight + 20 : 0);\n\n            g.attr('clip-path', clipEdge ? 
'url(#nv-edge-clip-' + id + ')' : '');\n\n            function updateInteractiveLayer() {\n                // Always clear needs-update flag regardless of whether or not\n                // we will actually do anything (avoids needless invocations).\n                needsUpdate = false;\n\n                if (!interactive) return false;\n\n                // inject series and point index for reference into voronoi\n                if (useVoronoi === true) {\n                    var vertices = d3.merge(data.map(function(group, groupIndex) {\n                            return group.values\n                                .map(function(point, pointIndex) {\n                                    // *Adding noise to make duplicates very unlikely\n                                    // *Injecting series and point index for reference\n                                    /* *Adding a 'jitter' to the points, because there's an issue in d3.geom.voronoi.\n                                     */\n                                    var pX = getX(point,pointIndex);\n                                    var pY = getY(point,pointIndex);\n\n                                    return [nv.utils.NaNtoZero(x(pX))+ Math.random() * 1e-4,\n                                            nv.utils.NaNtoZero(y(pY))+ Math.random() * 1e-4,\n                                        groupIndex,\n                                        pointIndex, point]; //temp hack to add noise until I think of a better way so there are no duplicates\n                                })\n                                .filter(function(pointArray, pointIndex) {\n                                    return pointActive(pointArray[4], pointIndex); // Issue #237.. move filter to after map, so pointIndex is correct!\n                                })\n                        })\n                    );\n\n                    if (vertices.length == 0) return false;  // No active points, we're done\n                    if (vertices.length < 3) {\n                        // Issue #283 - Adding 2 dummy points to the voronoi b/c voronoi requires min 3 points to work\n                        vertices.push([x.range()[0] - 20, y.range()[0] - 20, null, null]);\n                        vertices.push([x.range()[1] + 20, y.range()[1] + 20, null, null]);\n                        vertices.push([x.range()[0] - 20, y.range()[0] + 20, null, null]);\n                        vertices.push([x.range()[1] + 20, y.range()[1] - 20, null, null]);\n                    }\n\n                    // keep voronoi sections from going more than 10 outside of graph\n                    // to avoid overlap with other things like legend etc\n                    var bounds = d3.geom.polygon([\n                        [-10,-10],\n                        [-10,height + 10],\n                        [width + 10,height + 10],\n                        [width + 10,-10]\n                    ]);\n\n                    var voronoi = d3.geom.voronoi(vertices).map(function(d, i) {\n                        return {\n                            'data': bounds.clip(d),\n                            'series': vertices[i][2],\n                            'point': vertices[i][3]\n                        }\n                    });\n\n                    // nuke all voronoi paths on reload and recreate them\n                    wrap.select('.nv-point-paths').selectAll('path').remove();\n                    var pointPaths = wrap.select('.nv-point-paths').selectAll('path').data(voronoi);\n                   
 var vPointPaths = pointPaths\n                        .enter().append(\"svg:path\")\n                        .attr(\"d\", function(d) {\n                            if (!d || !d.data || d.data.length === 0)\n                                return 'M 0 0';\n                            else\n                                return \"M\" + d.data.join(\",\") + \"Z\";\n                        })\n                        .attr(\"id\", function(d,i) {\n                            return \"nv-path-\"+i; })\n                        .attr(\"clip-path\", function(d,i) { return \"url(#nv-clip-\"+id+\"-\"+i+\")\"; })\n                        ;\n\n                    // good for debugging point hover issues\n                    if (showVoronoi) {\n                        vPointPaths.style(\"fill\", d3.rgb(230, 230, 230))\n                            .style('fill-opacity', 0.4)\n                            .style('stroke-opacity', 1)\n                            .style(\"stroke\", d3.rgb(200,200,200));\n                    }\n\n                    if (clipVoronoi) {\n                        // voronoi sections are already set to clip,\n                        // just create the circles with the IDs they expect\n                        wrap.select('.nv-point-clips').selectAll('*').remove(); // must do * since it has sub-dom\n                        var pointClips = wrap.select('.nv-point-clips').selectAll('clipPath').data(vertices);\n                        var vPointClips = pointClips\n                            .enter().append(\"svg:clipPath\")\n                            .attr(\"id\", function(d, i) { return \"nv-clip-\"+id+\"-\"+i;})\n                            .append(\"svg:circle\")\n                            .attr('cx', function(d) { return d[0]; })\n                            .attr('cy', function(d) { return d[1]; })\n                            .attr('r', clipRadius);\n                    }\n\n                    var mouseEventCallback = function(el, d, mDispatch) {\n                        if (needsUpdate) return 0;\n                        var series = data[d.series];\n                        if (series === undefined) return;\n                        var point  = series.values[d.point];\n                        point['color'] = color(series, d.series);\n\n                        // standardize attributes for tooltip.\n                        point['x'] = getX(point);\n                        point['y'] = getY(point);\n\n                        // can't just get box of event node since it's actually a voronoi polygon\n                        var box = container.node().getBoundingClientRect();\n                        var scrollTop  = window.pageYOffset || document.documentElement.scrollTop;\n                        var scrollLeft = window.pageXOffset || document.documentElement.scrollLeft;\n\n                        var pos = {\n                            left: x(getX(point, d.point)) + box.left + scrollLeft + margin.left + 10,\n                            top: y(getY(point, d.point)) + box.top + scrollTop + margin.top + 10\n                        };\n\n                        mDispatch({\n                            point: point,\n                            series: series,\n                            pos: pos,\n                            relativePos: [x(getX(point, d.point)) + margin.left, y(getY(point, d.point)) + margin.top],\n                            seriesIndex: d.series,\n                            pointIndex: d.point,\n                            event: d3.event,\n         
                   element: el\n                        });\n                    };\n\n                    pointPaths\n                        .on('click', function(d) {\n                            mouseEventCallback(this, d, dispatch.elementClick);\n                        })\n                        .on('dblclick', function(d) {\n                            mouseEventCallback(this, d, dispatch.elementDblClick);\n                        })\n                        .on('mouseover', function(d) {\n                            mouseEventCallback(this, d, dispatch.elementMouseover);\n                        })\n                        .on('mouseout', function(d, i) {\n                            mouseEventCallback(this, d, dispatch.elementMouseout);\n                        });\n\n                } else {\n                    // add event handlers to points instead voronoi paths\n                    wrap.select('.nv-groups').selectAll('.nv-group')\n                        .selectAll('.nv-point')\n                        //.data(dataWithPoints)\n                        //.style('pointer-events', 'auto') // recativate events, disabled by css\n                        .on('click', function(d,i) {\n                            //nv.log('test', d, i);\n                            if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point\n                            var series = data[d.series],\n                                point  = series.values[i];\n                            var element = this;\n                            dispatch.elementClick({\n                                point: point,\n                                series: series,\n                                pos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top], //TODO: make this pos base on the page\n                                relativePos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],\n                                seriesIndex: d.series,\n                                pointIndex: i,\n                                event: d3.event,\n                                element: element\n                            });\n                        })\n                        .on('dblclick', function(d,i) {\n                            if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point\n                            var series = data[d.series],\n                                point  = series.values[i];\n\n                            dispatch.elementDblClick({\n                                point: point,\n                                series: series,\n                                pos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],//TODO: make this pos base on the page\n                                relativePos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],\n                                seriesIndex: d.series,\n                                pointIndex: i\n                            });\n                        })\n                        .on('mouseover', function(d,i) {\n                            if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point\n                            var series = data[d.series],\n                                point  = series.values[i];\n\n                            dispatch.elementMouseover({\n                                point: point,\n                                series: series,\n                                pos: 
[x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],//TODO: make this pos base on the page\n                                relativePos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],\n                                seriesIndex: d.series,\n                                pointIndex: i,\n                                color: color(d, i)\n                            });\n                        })\n                        .on('mouseout', function(d,i) {\n                            if (needsUpdate || !data[d.series]) return 0; //check if this is a dummy point\n                            var series = data[d.series],\n                                point  = series.values[i];\n\n                            dispatch.elementMouseout({\n                                point: point,\n                                series: series,\n                                pos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],//TODO: make this pos base on the page\n                                relativePos: [x(getX(point, i)) + margin.left, y(getY(point, i)) + margin.top],\n                                seriesIndex: d.series,\n                                pointIndex: i,\n                                color: color(d, i)\n                            });\n                        });\n                }\n            }\n\n            needsUpdate = true;\n            var groups = wrap.select('.nv-groups').selectAll('.nv-group')\n                .data(function(d) { return d }, function(d) { return d.key });\n            groups.enter().append('g')\n                .style('stroke-opacity', 1e-6)\n                .style('fill-opacity', 1e-6);\n            groups.exit()\n                .remove();\n            groups\n                .attr('class', function(d,i) {\n                    return (d.classed || '') + ' nv-group nv-series-' + i;\n                })\n                .classed('nv-noninteractive', !interactive)\n                .classed('hover', function(d) { return d.hover });\n            groups.watchTransition(renderWatch, 'scatter: groups')\n                .style('fill', function(d,i) { return color(d, i) })\n                .style('stroke', function(d,i) { return d.pointBorderColor || pointBorderColor || color(d, i) })\n                .style('stroke-opacity', 1)\n                .style('fill-opacity', .5);\n\n            // create the points, maintaining their IDs from the original data set\n            var points = groups.selectAll('path.nv-point')\n                .data(function(d) {\n                    return d.values.map(\n                        function (point, pointIndex) {\n                            return [point, pointIndex]\n                        }).filter(\n                            function(pointArray, pointIndex) {\n                                return pointActive(pointArray[0], pointIndex)\n                            })\n                    });\n            points.enter().append('path')\n                .attr('class', function (d) {\n                    return 'nv-point nv-point-' + d[1];\n                })\n                .style('fill', function (d) { return d.color })\n                .style('stroke', function (d) { return d.color })\n                .attr('transform', function(d) {\n                    return 'translate(' + nv.utils.NaNtoZero(x0(getX(d[0],d[1]))) + ',' + nv.utils.NaNtoZero(y0(getY(d[0],d[1]))) + ')'\n                })\n                .attr('d',\n                    nv.utils.symbol()\n             
       .type(function(d) { return getShape(d[0]); })\n                    .size(function(d) { return z(getSize(d[0],d[1])) })\n            );\n            points.exit().each(delCache).remove();\n            groups.exit().selectAll('path.nv-point')\n                .watchTransition(renderWatch, 'scatter exit')\n                .attr('transform', function(d) {\n                    return 'translate(' + nv.utils.NaNtoZero(x(getX(d[0],d[1]))) + ',' + nv.utils.NaNtoZero(y(getY(d[0],d[1]))) + ')'\n                })\n                .remove();\n            // Update points position only if \"x\" or \"y\" have changed\n            points.filter(function (d) { return scaleDiff || sizeDiff || getDiffs(d, 'x', getX, 'y', getY); })\n                .watchTransition(renderWatch, 'scatter points')\n                .attr('transform', function(d) {\n                    //nv.log(d, getX(d[0],d[1]), x(getX(d[0],d[1])));\n                    return 'translate(' + nv.utils.NaNtoZero(x(getX(d[0],d[1]))) + ',' + nv.utils.NaNtoZero(y(getY(d[0],d[1]))) + ')'\n                });\n            // Update points appearance only if \"shape\" or \"size\" have changed\n            points.filter(function (d) { return scaleDiff || sizeDiff || getDiffs(d, 'shape', getShape, 'size', getSize); })\n                .watchTransition(renderWatch, 'scatter points')\n                .attr('d',\n                    nv.utils.symbol()\n                    .type(function(d) { return getShape(d[0]); })\n                    .size(function(d) { return z(getSize(d[0],d[1])) })\n            );\n\n            // add label a label to scatter chart\n            if(showLabels)\n            {\n                var titles =  groups.selectAll('.nv-label')\n                    .data(function(d) {\n                        return d.values.map(\n                            function (point, pointIndex) {\n                                return [point, pointIndex]\n                            }).filter(\n                                function(pointArray, pointIndex) {\n                                    return pointActive(pointArray[0], pointIndex)\n                                })\n                        });\n\n                titles.enter().append('text')\n                    .style('fill', function (d,i) {\n                        return d.color })\n                    .style('stroke-opacity', 0)\n                    .style('fill-opacity', 1)\n                    .attr('transform', function(d) {\n                        var dx = nv.utils.NaNtoZero(x0(getX(d[0],d[1]))) + Math.sqrt(z(getSize(d[0],d[1]))/Math.PI) + 2;\n                        return 'translate(' + dx + ',' + nv.utils.NaNtoZero(y0(getY(d[0],d[1]))) + ')';\n                    })\n                    .text(function(d,i){\n                        return d[0].label;});\n\n                titles.exit().remove();\n                groups.exit().selectAll('path.nv-label')\n                    .watchTransition(renderWatch, 'scatter exit')\n                    .attr('transform', function(d) {\n                        var dx = nv.utils.NaNtoZero(x(getX(d[0],d[1])))+ Math.sqrt(z(getSize(d[0],d[1]))/Math.PI)+2;\n                        return 'translate(' + dx + ',' + nv.utils.NaNtoZero(y(getY(d[0],d[1]))) + ')';\n                    })\n                    .remove();\n               titles.each(function(d) {\n                  d3.select(this)\n                    .classed('nv-label', true)\n                    .classed('nv-label-' + d[1], false)\n                    
.classed('hover',false);\n                });\n                titles.watchTransition(renderWatch, 'scatter labels')\n                    .attr('transform', function(d) {\n                        var dx = nv.utils.NaNtoZero(x(getX(d[0],d[1])))+ Math.sqrt(z(getSize(d[0],d[1]))/Math.PI)+2;\n                        return 'translate(' + dx + ',' + nv.utils.NaNtoZero(y(getY(d[0],d[1]))) + ')'\n                    });\n            }\n\n            // Delay updating the invisible interactive layer for smoother animation\n            if( interactiveUpdateDelay )\n            {\n                clearTimeout(timeoutID); // stop repeat calls to updateInteractiveLayer\n                timeoutID = setTimeout(updateInteractiveLayer, interactiveUpdateDelay );\n            }\n            else\n            {\n                updateInteractiveLayer();\n            }\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n            z0 = z.copy();\n\n            width0 = width;\n            height0 = height;\n\n        });\n        renderWatch.renderEnd('scatter immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    // utility function calls provided by this chart\n    chart._calls = new function() {\n        this.clearHighlights = function () {\n            nv.dom.write(function() {\n                container.selectAll(\".nv-point.hover\").classed(\"hover\", false);\n            });\n            return null;\n        };\n        this.highlightPoint = function (seriesIndex, pointIndex, isHoverOver) {\n            nv.dom.write(function() {\n                container.select('.nv-groups')\n                  .selectAll(\".nv-series-\" + seriesIndex)\n                  .selectAll(\".nv-point-\" + pointIndex)\n                  .classed(\"hover\", isHoverOver);\n            });\n        };\n    };\n\n    // trigger calls from events too\n    dispatch.on('elementMouseover.point', function(d) {\n        if (interactive) chart._calls.highlightPoint(d.seriesIndex,d.pointIndex,true);\n    });\n\n    dispatch.on('elementMouseout.point', function(d) {\n        if (interactive) chart._calls.highlightPoint(d.seriesIndex,d.pointIndex,false);\n    });\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:        {get: function(){return width;}, set: function(_){width=_;}},\n        height:       {get: function(){return height;}, set: function(_){height=_;}},\n        xScale:       {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:       {get: function(){return y;}, set: function(_){y=_;}},\n        pointScale:   {get: function(){return z;}, set: function(_){z=_;}},\n        xDomain:      {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain:      {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        pointDomain:  {get: function(){return sizeDomain;}, set: function(_){sizeDomain=_;}},\n        xRange:       {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:       {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        pointRange:   {get: function(){return sizeRange;}, set: function(_){sizeRange=_;}},\n        forceX:       
{get: function(){return forceX;}, set: function(_){forceX=_;}},\n        forceY:       {get: function(){return forceY;}, set: function(_){forceY=_;}},\n        forcePoint:   {get: function(){return forceSize;}, set: function(_){forceSize=_;}},\n        interactive:  {get: function(){return interactive;}, set: function(_){interactive=_;}},\n        pointActive:  {get: function(){return pointActive;}, set: function(_){pointActive=_;}},\n        padDataOuter: {get: function(){return padDataOuter;}, set: function(_){padDataOuter=_;}},\n        padData:      {get: function(){return padData;}, set: function(_){padData=_;}},\n        clipEdge:     {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        clipVoronoi:  {get: function(){return clipVoronoi;}, set: function(_){clipVoronoi=_;}},\n        clipRadius:   {get: function(){return clipRadius;}, set: function(_){clipRadius=_;}},\n        showVoronoi:   {get: function(){return showVoronoi;}, set: function(_){showVoronoi=_;}},\n        id:           {get: function(){return id;}, set: function(_){id=_;}},\n        interactiveUpdateDelay: {get:function(){return interactiveUpdateDelay;}, set: function(_){interactiveUpdateDelay=_;}},\n        showLabels: {get: function(){return showLabels;}, set: function(_){ showLabels = _;}},\n        pointBorderColor: {get: function(){return pointBorderColor;}, set: function(_){pointBorderColor=_;}},\n\n        // simple functor options\n        x:     {get: function(){return getX;}, set: function(_){getX = d3.functor(_);}},\n        y:     {get: function(){return getY;}, set: function(_){getY = d3.functor(_);}},\n        pointSize: {get: function(){return getSize;}, set: function(_){getSize = d3.functor(_);}},\n        pointShape: {get: function(){return getShape;}, set: function(_){getShape = d3.functor(_);}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n        }},\n        color: {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        useVoronoi: {get: function(){return useVoronoi;}, set: function(_){\n            useVoronoi = _;\n            if (useVoronoi === false) {\n                clipVoronoi = false;\n            }\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","\nnv.models.scatterChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var scatter      = nv.models.scatter()\n        , xAxis        = nv.models.axis()\n        , yAxis        = nv.models.axis()\n        , legend       = nv.models.legend()\n        , distX        = nv.models.distribution()\n        , distY        = nv.models.distribution()\n        , tooltip      = nv.models.tooltip()\n        ;\n\n    var margin       = {top: 30, right: 20, bottom: 50, left: 75}\n        , marginTop = null\n        , width        = null\n        , height       = null\n        , container    = null\n        , color        = nv.utils.defaultColor()\n        , x            = scatter.xScale()\n        , y            = scatter.yScale()\n        , showDistX    = false\n        , showDistY    = false\n        , showLegend   = true\n        , showXAxis    = true\n        , showYAxis    = true\n        , rightAlignYAxis = false\n        , state = nv.utils.state()\n        , defaultState = null\n        , dispatch = d3.dispatch('stateChange', 'changeState', 'renderEnd')\n        , noData       = null\n        , duration = 250\n        , showLabels    = false\n        ;\n\n    scatter.xScale(x).yScale(y);\n    xAxis.orient('bottom').tickPadding(10);\n    yAxis\n        .orient((rightAlignYAxis) ? 
'right' : 'left')\n        .tickPadding(10)\n    ;\n    distX.axis('x');\n    distY.axis('y');\n    tooltip\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        })\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        });\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var x0, y0\n        , renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled })\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(scatter);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n        if (showDistX) renderWatch.models(distX);\n        if (showDistY) renderWatch.models(distY);\n\n        selection.each(function(data) {\n            var that = this;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() {\n                if (duration === 0)\n                    container.call(chart);\n                else\n                    container.transition().duration(duration).call(chart);\n            };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disableddisabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display noData message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container);\n                renderWatch.renderEnd('scatter immediate');\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            // Setup Scales\n            x = scatter.xScale();\n            y = scatter.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-scatterChart').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-scatterChart nv-chart-' + scatter.id());\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            // background for pointer events\n            
gEnter.append('rect').attr('class', 'nvd3 nv-background').style(\"pointer-events\",\"none\");\n\n            gEnter.append('g').attr('class', 'nv-x nv-axis');\n            gEnter.append('g').attr('class', 'nv-y nv-axis');\n            gEnter.append('g').attr('class', 'nv-scatterWrap');\n            gEnter.append('g').attr('class', 'nv-regressionLinesWrap');\n            gEnter.append('g').attr('class', 'nv-distWrap');\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                var legendWidth = availableWidth;\n                legend.width(legendWidth);\n\n                wrap.select('.nv-legendWrap')\n                    .datum(data)\n                    .call(legend);\n\n                if (!marginTop && legend.height() !== margin.top) {\n                    margin.top = legend.height();\n                    availableHeight = nv.utils.availableHeight(height, container, margin);\n                }\n\n                wrap.select('.nv-legendWrap')\n                    .attr('transform', 'translate(0' + ',' + (-margin.top) +')');\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Main Chart Component(s)\n            scatter\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    d.color = d.color || color(d, i);\n                    return d.color;\n                }).filter(function(d,i) { return !data[i].disabled }))\n                .showLabels(showLabels);\n\n            wrap.select('.nv-scatterWrap')\n                .datum(data.filter(function(d) { return !d.disabled }))\n                .call(scatter);\n\n\n            wrap.select('.nv-regressionLinesWrap')\n                .attr('clip-path', 'url(#nv-edge-clip-' + scatter.id() + ')');\n\n            var regWrap = wrap.select('.nv-regressionLinesWrap').selectAll('.nv-regLines')\n                .data(function (d) {\n                    return d;\n                });\n\n            regWrap.enter().append('g').attr('class', 'nv-regLines');\n\n            var regLine = regWrap.selectAll('.nv-regLine')\n                .data(function (d) {\n                    return [d]\n                });\n\n            regLine.enter()\n                .append('line').attr('class', 'nv-regLine')\n                .style('stroke-opacity', 0);\n\n            // don't add lines unless we have slope and intercept to use\n            regLine.filter(function(d) {\n                return d.intercept && d.slope;\n            })\n                .watchTransition(renderWatch, 'scatterPlusLineChart: regline')\n                .attr('x1', x.range()[0])\n                .attr('x2', x.range()[1])\n                .attr('y1', function (d, i) {\n                    return y(x.domain()[0] * d.slope + d.intercept)\n                })\n                .attr('y2', function (d, i) {\n                    return y(x.domain()[1] * d.slope + d.intercept)\n                })\n                .style('stroke', function (d, i, j) {\n                    return color(d, j)\n                })\n                .style('stroke-opacity', function (d, i) {\n                   
 return (d.disabled || typeof d.slope === 'undefined' || typeof d.intercept === 'undefined') ? 0 : 1\n                });\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis\n                    .scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize( -availableHeight , 0);\n\n                g.select('.nv-x.nv-axis')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')')\n                    .call(xAxis);\n            }\n\n            if (showYAxis) {\n                yAxis\n                    .scale(y)\n                    ._ticks( nv.utils.calcTicksY(availableHeight/36, data) )\n                    .tickSize( -availableWidth, 0);\n\n                g.select('.nv-y.nv-axis')\n                    .call(yAxis);\n            }\n\n            // Setup Distribution\n            if (showDistX) {\n                distX\n                    .getData(scatter.x())\n                    .scale(x)\n                    .width(availableWidth)\n                    .color(data.map(function(d,i) {\n                        return d.color || color(d, i);\n                    }).filter(function(d,i) { return !data[i].disabled }));\n                gEnter.select('.nv-distWrap').append('g')\n                    .attr('class', 'nv-distributionX');\n                g.select('.nv-distributionX')\n                    .attr('transform', 'translate(0,' + y.range()[0] + ')')\n                    .datum(data.filter(function(d) { return !d.disabled }))\n                    .call(distX);\n            }\n\n            if (showDistY) {\n                distY\n                    .getData(scatter.y())\n                    .scale(y)\n                    .width(availableHeight)\n                    .color(data.map(function(d,i) {\n                        return d.color || color(d, i);\n                    }).filter(function(d,i) { return !data[i].disabled }));\n                gEnter.select('.nv-distWrap').append('g')\n                    .attr('class', 'nv-distributionY');\n                g.select('.nv-distributionY')\n                    .attr('transform', 'translate(' + (rightAlignYAxis ? 
availableWidth : -distY.size() ) + ',0)')\n                    .datum(data.filter(function(d) { return !d.disabled }))\n                    .call(distY);\n            }\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n                if (typeof e.disabled !== 'undefined') {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n                    state.disabled = e.disabled;\n                }\n                chart.update();\n            });\n\n            // mouseover needs availableHeight so we just keep scatter mouse events inside the chart block\n            scatter.dispatch.on('elementMouseout.tooltip', function(evt) {\n                tooltip.hidden(true);\n                container.select('.nv-chart-' + scatter.id() + ' .nv-series-' + evt.seriesIndex + ' .nv-distx-' + evt.pointIndex)\n                    .attr('y1', 0);\n                container.select('.nv-chart-' + scatter.id() + ' .nv-series-' + evt.seriesIndex + ' .nv-disty-' + evt.pointIndex)\n                    .attr('x2', distY.size());\n            });\n\n            scatter.dispatch.on('elementMouseover.tooltip', function(evt) {\n                container.select('.nv-series-' + evt.seriesIndex + ' .nv-distx-' + evt.pointIndex)\n                    .attr('y1', evt.relativePos[1] - availableHeight);\n                container.select('.nv-series-' + evt.seriesIndex + ' .nv-disty-' + evt.pointIndex)\n                    .attr('x2', evt.relativePos[0] + distX.size());\n                tooltip.data(evt).hidden(false);\n            });\n\n            //store old scales for use in transitions on update\n            x0 = x.copy();\n            y0 = y.copy();\n\n        });\n\n        renderWatch.renderEnd('scatter with line immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.scatter = scatter;\n    chart.legend = legend;\n    chart.xAxis = xAxis;\n    chart.yAxis = yAxis;\n    chart.distX = distX;\n    chart.distY = distY;\n    chart.tooltip = tooltip;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        container:  {get: function(){return container;}, set: function(_){container=_;}},\n        showDistX:  {get: function(){return showDistX;}, set: function(_){showDistX=_;}},\n        showDistY:  {get: function(){return showDistY;}, set: function(_){showDistY=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        showXAxis:  {get: 
function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:  {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:     {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:     {get: function(){return noData;}, set: function(_){noData=_;}},\n        duration:   {get: function(){return duration;}, set: function(_){duration=_;}},\n        showLabels: {get: function(){return showLabels;}, set: function(_){showLabels=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( (_) ? 'right' : 'left');\n        }},\n        color: {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n            distX.color(color);\n            distY.color(color);\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, scatter);\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","\nnv.models.sparkline = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 2, right: 0, bottom: 2, left: 0}\n        , width = 400\n        , height = 32\n        , container = null\n        , animate = true\n        , x = d3.scale.linear()\n        , y = d3.scale.linear()\n        , getX = function(d) { return d.x }\n        , getY = function(d) { return d.y }\n        , color = nv.utils.getColor(['#000'])\n        , xDomain\n        , yDomain\n        , xRange\n        , yRange\n        , showMinMaxPoints = true\n        , showCurrentPoint = true\n        , dispatch = d3.dispatch('renderEnd')\n        ;\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n    \n    function chart(selection) {\n        renderWatch.reset();\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup Scales\n            x   .domain(xDomain || d3.extent(data, getX ))\n                .range(xRange || [0, availableWidth]);\n\n            y   .domain(yDomain || d3.extent(data, getY ))\n                .range(yRange || [availableHeight, 0]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-sparkline').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-sparkline');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            
wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')')\n\n            var paths = wrap.selectAll('path')\n                .data(function(d) { return [d] });\n            paths.enter().append('path');\n            paths.exit().remove();\n            paths\n                .style('stroke', function(d,i) { return d.color || color(d, i) })\n                .attr('d', d3.svg.line()\n                    .x(function(d,i) { return x(getX(d,i)) })\n                    .y(function(d,i) { return y(getY(d,i)) })\n            );\n\n            // TODO: Add CURRENT data point (Need Min, Mac, Current / Most recent)\n            var points = wrap.selectAll('circle.nv-point')\n                .data(function(data) {\n                    var yValues = data.map(function(d, i) { return getY(d,i); });\n                    function pointIndex(index) {\n                        if (index != -1) {\n                            var result = data[index];\n                            result.pointIndex = index;\n                            return result;\n                        } else {\n                            return null;\n                        }\n                    }\n                    var maxPoint = pointIndex(yValues.lastIndexOf(y.domain()[1])),\n                        minPoint = pointIndex(yValues.indexOf(y.domain()[0])),\n                        currentPoint = pointIndex(yValues.length - 1);\n                    return [(showMinMaxPoints ? minPoint : null), (showMinMaxPoints ? maxPoint : null), (showCurrentPoint ? currentPoint : null)].filter(function (d) {return d != null;});\n                });\n            points.enter().append('circle');\n            points.exit().remove();\n            points\n                .attr('cx', function(d,i) { return x(getX(d,d.pointIndex)) })\n                .attr('cy', function(d,i) { return y(getY(d,d.pointIndex)) })\n                .attr('r', 2)\n                .attr('class', function(d,i) {\n                    return getX(d, d.pointIndex) == x.domain()[1] ? 'nv-point nv-currentValue' :\n                            getY(d, d.pointIndex) == y.domain()[0] ? 
'nv-point nv-minValue' : 'nv-point nv-maxValue'\n                });\n        });\n        \n        renderWatch.renderEnd('sparkline immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:            {get: function(){return width;}, set: function(_){width=_;}},\n        height:           {get: function(){return height;}, set: function(_){height=_;}},\n        xDomain:          {get: function(){return xDomain;}, set: function(_){xDomain=_;}},\n        yDomain:          {get: function(){return yDomain;}, set: function(_){yDomain=_;}},\n        xRange:           {get: function(){return xRange;}, set: function(_){xRange=_;}},\n        yRange:           {get: function(){return yRange;}, set: function(_){yRange=_;}},\n        xScale:           {get: function(){return x;}, set: function(_){x=_;}},\n        yScale:           {get: function(){return y;}, set: function(_){y=_;}},\n        animate:          {get: function(){return animate;}, set: function(_){animate=_;}},\n        showMinMaxPoints: {get: function(){return showMinMaxPoints;}, set: function(_){showMinMaxPoints=_;}},\n        showCurrentPoint: {get: function(){return showCurrentPoint;}, set: function(_){showCurrentPoint=_;}},\n\n        //functor options\n        x: {get: function(){return getX;}, set: function(_){getX=d3.functor(_);}},\n        y: {get: function(){return getY;}, set: function(_){getY=d3.functor(_);}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }}\n    });\n\n    chart.dispatch = dispatch;\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","\nnv.models.sparklinePlus = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var sparkline = nv.models.sparkline();\n\n    var margin = {top: 15, right: 100, bottom: 10, left: 50}\n        , width = null\n        , height = null\n        , x\n        , y\n        , index = []\n        , paused = false\n        , xTickFormat = d3.format(',r')\n        , yTickFormat = d3.format(',.2f')\n        , showLastValue = true\n        , alignValue = true\n        , rightAlignValue = false\n        , noData = null\n        , dispatch = d3.dispatch('renderEnd')\n        ;\n        \n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(sparkline);\n        selection.each(function(data) {\n            var container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() { container.call(chart); };\n            chart.container = this;\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            var currentValue = sparkline.y()(data[data.length-1], data.length-1);\n\n            // Setup Scales\n            x = sparkline.xScale();\n            y = sparkline.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-sparklineplus').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-sparklineplus');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-sparklineWrap');\n            gEnter.append('g').attr('class', 'nv-valueWrap');\n            gEnter.append('g').attr('class', 'nv-hoverArea');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            // Main Chart Component(s)\n            var sparklineWrap = g.select('.nv-sparklineWrap');\n\n            sparkline.width(availableWidth).height(availableHeight);\n            sparklineWrap.call(sparkline);\n\n            if (showLastValue) {\n                var valueWrap = g.select('.nv-valueWrap');\n                var value = valueWrap.selectAll('.nv-currentValue')\n                    .data([currentValue]);\n\n                value.enter().append('text').attr('class', 'nv-currentValue')\n                    .attr('dx', rightAlignValue ? -8 : 8)\n                    .attr('dy', '.9em')\n                    .style('text-anchor', rightAlignValue ? 
'end' : 'start');\n\n                value\n                    .attr('x', availableWidth + (rightAlignValue ? margin.right : 0))\n                    .attr('y', alignValue ? function (d) {\n                        return y(d)\n                    } : 0)\n                    .style('fill', sparkline.color()(data[data.length - 1], data.length - 1))\n                    .text(yTickFormat(currentValue));\n            }\n\n            gEnter.select('.nv-hoverArea').append('rect')\n                .on('mousemove', sparklineHover)\n                .on('click', function() { paused = !paused })\n                .on('mouseout', function() { index = []; updateValueLine(); });\n\n            g.select('.nv-hoverArea rect')\n                .attr('transform', function(d) { return 'translate(' + -margin.left + ',' + -margin.top + ')' })\n                .attr('width', availableWidth + margin.left + margin.right)\n                .attr('height', availableHeight + margin.top);\n\n            //index is currently global (within the chart), may or may not keep it that way\n            function updateValueLine() {\n                if (paused) return;\n\n                var hoverValue = g.selectAll('.nv-hoverValue').data(index);\n\n                var hoverEnter = hoverValue.enter()\n                    .append('g').attr('class', 'nv-hoverValue')\n                    .style('stroke-opacity', 0)\n                    .style('fill-opacity', 0);\n\n                hoverValue.exit()\n                    .transition().duration(250)\n                    .style('stroke-opacity', 0)\n                    .style('fill-opacity', 0)\n                    .remove();\n\n                hoverValue\n                    .attr('transform', function(d) { return 'translate(' + x(sparkline.x()(data[d],d)) + ',0)' })\n                    .transition().duration(250)\n                    .style('stroke-opacity', 1)\n                    .style('fill-opacity', 1);\n\n                if (!index.length) return;\n\n                hoverEnter.append('line')\n                    .attr('x1', 0)\n                    .attr('y1', -margin.top)\n                    .attr('x2', 0)\n                    .attr('y2', availableHeight);\n\n                hoverEnter.append('text').attr('class', 'nv-xValue')\n                    .attr('x', -6)\n                    .attr('y', -margin.top)\n                    .attr('text-anchor', 'end')\n                    .attr('dy', '.9em');\n\n                g.select('.nv-hoverValue .nv-xValue')\n                    .text(xTickFormat(sparkline.x()(data[index[0]], index[0])));\n\n                hoverEnter.append('text').attr('class', 'nv-yValue')\n                    .attr('x', 6)\n                    .attr('y', -margin.top)\n                    .attr('text-anchor', 'start')\n                    .attr('dy', '.9em');\n\n                g.select('.nv-hoverValue .nv-yValue')\n                    .text(yTickFormat(sparkline.y()(data[index[0]], index[0])));\n            }\n\n            function sparklineHover() {\n                if (paused) return;\n\n                var pos = d3.mouse(this)[0] - margin.left;\n\n                function getClosestIndex(data, x) {\n                    var distance = Math.abs(sparkline.x()(data[0], 0) - x);\n                    var closestIndex = 0;\n                    for (var i = 0; i < data.length; i++){\n                        if (Math.abs(sparkline.x()(data[i], i) - x) < distance) {\n                            distance = Math.abs(sparkline.x()(data[i], i) - x);\n                    
        closestIndex = i;\n                        }\n                    }\n                    return closestIndex;\n                }\n\n                index = [getClosestIndex(data, Math.round(x.invert(pos)))];\n                updateValueLine();\n            }\n\n        });\n        renderWatch.renderEnd('sparklinePlus immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.sparkline = sparkline;\n\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:           {get: function(){return width;}, set: function(_){width=_;}},\n        height:          {get: function(){return height;}, set: function(_){height=_;}},\n        xTickFormat:     {get: function(){return xTickFormat;}, set: function(_){xTickFormat=_;}},\n        yTickFormat:     {get: function(){return yTickFormat;}, set: function(_){yTickFormat=_;}},\n        showLastValue:   {get: function(){return showLastValue;}, set: function(_){showLastValue=_;}},\n        alignValue:      {get: function(){return alignValue;}, set: function(_){alignValue=_;}},\n        rightAlignValue: {get: function(){return rightAlignValue;}, set: function(_){rightAlignValue=_;}},\n        noData:          {get: function(){return noData;}, set: function(_){noData=_;}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, sparkline);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.stackedArea = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 960\n        , height = 500\n        , color = nv.utils.defaultColor() // a function that computes the color\n        , id = Math.floor(Math.random() * 100000) //Create semi-unique ID incase user doesn't selet one\n        , container = null\n        , getX = function(d) { return d.x } // accessor to get the x value from a data point\n        , getY = function(d) { return d.y } // accessor to get the y value from a data point\n        , defined = function(d,i) { return !isNaN(getY(d,i)) && getY(d,i) !== null } // allows a line to be not continuous when it is not defined\n        , style = 'stack'\n        , offset = 'zero'\n        , order = 'default'\n        , interpolate = 'linear'  // controls the line interpolation\n        , clipEdge = false // if true, masks lines within x and y scale\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , scatter = nv.models.scatter()\n        , duration = 250\n        , dispatch =  d3.dispatch('areaClick', 'areaMouseover', 'areaMouseout','renderEnd', 'elementClick', 'elementMouseover', 'elementMouseout')\n        ;\n\n    scatter\n        .pointSize(2.2) // default size\n        .pointDomain([2.2, 2.2]) // all the same size by default\n    ;\n\n    /************************************\n     * offset:\n     *   'wiggle' (stream)\n     *   'zero' (stacked)\n     *   'expand' (normalize to 100%)\n     *   'silhouette' (simple centered)\n     *\n     * order:\n     *   'inside-out' (stream)\n     *   'default' (input order)\n     ************************************/\n\n    var renderWatch = nv.utils.renderWatch(dispatch, duration);\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(scatter);\n        selection.each(function(data) {\n            var availableWidth = width - margin.left - margin.right,\n                availableHeight = height - margin.top - margin.bottom;\n\n            container = d3.select(this);\n            nv.utils.initSVG(container);\n\n            // Setup Scales\n            x = scatter.xScale();\n            y = scatter.yScale();\n\n            var dataRaw = data;\n            // Injecting point index into each point because d3.layout.stack().out does not give index\n            data.forEach(function(aseries, i) {\n                aseries.seriesIndex = i;\n                aseries.values = aseries.values.map(function(d, j) {\n                    d.index = j;\n                    d.seriesIndex = i;\n                    return d;\n                });\n            });\n\n            var dataFiltered = data.filter(function(series) {\n                return !series.disabled;\n            });\n\n            data = d3.layout.stack()\n                .order(order)\n                .offset(offset)\n                .values(function(d) { return d.values })  //TODO: make values customizeable in EVERY model in this fashion\n                .x(getX)\n                .y(getY)\n                .out(function(d, y0, y) {\n                    d.display = {\n                     
   y: y,\n                        y0: y0\n                    };\n                })\n            (dataFiltered);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-stackedarea').data([data]);\n            var wrapEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-stackedarea');\n            var defsEnter = wrapEnter.append('defs');\n            var gEnter = wrapEnter.append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-areaWrap');\n            gEnter.append('g').attr('class', 'nv-scatterWrap');\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n            \n            // If the user has not specified forceY, make sure 0 is included in the domain\n            // Otherwise, use user-specified values for forceY\n            if (scatter.forceY().length == 0) {\n                scatter.forceY().push(0);\n            }\n            \n            scatter\n                .width(availableWidth)\n                .height(availableHeight)\n                .x(getX)\n                .y(function(d) {\n                    if (d.display !== undefined) { return d.display.y + d.display.y0; }\n                })\n                .color(data.map(function(d,i) {\n                    d.color = d.color || color(d, d.seriesIndex);\n                    return d.color;\n                }));\n\n            var scatterWrap = g.select('.nv-scatterWrap')\n                .datum(data);\n\n            scatterWrap.call(scatter);\n\n            defsEnter.append('clipPath')\n                .attr('id', 'nv-edge-clip-' + id)\n                .append('rect');\n\n            wrap.select('#nv-edge-clip-' + id + ' rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            g.attr('clip-path', clipEdge ? 
'url(#nv-edge-clip-' + id + ')' : '');\n\n            var area = d3.svg.area()\n                .defined(defined)\n                .x(function(d,i)  { return x(getX(d,i)) })\n                .y0(function(d) {\n                    return y(d.display.y0)\n                })\n                .y1(function(d) {\n                    return y(d.display.y + d.display.y0)\n                })\n                .interpolate(interpolate);\n\n            var zeroArea = d3.svg.area()\n                .defined(defined)\n                .x(function(d,i)  { return x(getX(d,i)) })\n                .y0(function(d) { return y(d.display.y0) })\n                .y1(function(d) { return y(d.display.y0) });\n\n            var path = g.select('.nv-areaWrap').selectAll('path.nv-area')\n                .data(function(d) { return d });\n\n            path.enter().append('path').attr('class', function(d,i) { return 'nv-area nv-area-' + i })\n                .attr('d', function(d,i){\n                    return zeroArea(d.values, d.seriesIndex);\n                })\n                .on('mouseover', function(d,i) {\n                    d3.select(this).classed('hover', true);\n                    dispatch.areaMouseover({\n                        point: d,\n                        series: d.key,\n                        pos: [d3.event.pageX, d3.event.pageY],\n                        seriesIndex: d.seriesIndex\n                    });\n                })\n                .on('mouseout', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.areaMouseout({\n                        point: d,\n                        series: d.key,\n                        pos: [d3.event.pageX, d3.event.pageY],\n                        seriesIndex: d.seriesIndex\n                    });\n                })\n                .on('click', function(d,i) {\n                    d3.select(this).classed('hover', false);\n                    dispatch.areaClick({\n                        point: d,\n                        series: d.key,\n                        pos: [d3.event.pageX, d3.event.pageY],\n                        seriesIndex: d.seriesIndex\n                    });\n                });\n\n            path.exit().remove();\n            path.style('fill', function(d,i){\n                    return d.color || color(d, d.seriesIndex)\n                })\n                .style('stroke', function(d,i){ return d.color || color(d, d.seriesIndex) });\n            path.watchTransition(renderWatch,'stackedArea path')\n                .attr('d', function(d,i) {\n                    return area(d.values,i)\n                });\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            scatter.dispatch.on('elementMouseover.area', function(e) {\n                g.select('.nv-chart-' + id + ' .nv-area-' + e.seriesIndex).classed('hover', true);\n            });\n            scatter.dispatch.on('elementMouseout.area', function(e) {\n                g.select('.nv-chart-' + id + ' .nv-area-' + e.seriesIndex).classed('hover', false);\n            });\n\n            //Special offset functions\n            chart.d3_stackedOffset_stackPercent = function(stackData) {\n                var n = stackData.length,    //How many series\n                    m = stackData[0].length,     //how many points per series\n                    i,\n               
     j,\n                    o,\n                    y0 = [];\n\n                for (j = 0; j < m; ++j) { //Looping through all points\n                    for (i = 0, o = 0; i < dataRaw.length; i++) { //looping through all series\n                        o += getY(dataRaw[i].values[j]); //total y value of all series at a certian point in time.\n                    }\n\n                    if (o) for (i = 0; i < n; i++) { //(total y value of all series at point in time i) != 0\n                        stackData[i][j][1] /= o;\n                    } else { //(total y value of all series at point in time i) == 0\n                        for (i = 0; i < n; i++) {\n                            stackData[i][j][1] = 0;\n                        }\n                    }\n                }\n                for (j = 0; j < m; ++j) y0[j] = 0;\n                return y0;\n            };\n\n        });\n\n        renderWatch.renderEnd('stackedArea immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Global getters and setters\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.scatter = scatter;\n\n    scatter.dispatch.on('elementClick', function(){ dispatch.elementClick.apply(this, arguments); });\n    scatter.dispatch.on('elementMouseover', function(){ dispatch.elementMouseover.apply(this, arguments); });\n    scatter.dispatch.on('elementMouseout', function(){ dispatch.elementMouseout.apply(this, arguments); });\n\n    chart.interpolate = function(_) {\n        if (!arguments.length) return interpolate;\n        interpolate = _;\n        return chart;\n    };\n\n    chart.duration = function(_) {\n        if (!arguments.length) return duration;\n        duration = _;\n        renderWatch.reset(duration);\n        scatter.duration(duration);\n        return chart;\n    };\n\n    chart.dispatch = dispatch;\n    chart.scatter = scatter;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        defined: {get: function(){return defined;}, set: function(_){defined=_;}},\n        clipEdge: {get: function(){return clipEdge;}, set: function(_){clipEdge=_;}},\n        offset:      {get: function(){return offset;}, set: function(_){offset=_;}},\n        order:    {get: function(){return order;}, set: function(_){order=_;}},\n        interpolate:    {get: function(){return interpolate;}, set: function(_){interpolate=_;}},\n\n        // simple functor options\n        x:     {get: function(){return getX;}, set: function(_){getX = d3.functor(_);}},\n        y:     {get: function(){return getY;}, set: function(_){getY = d3.functor(_);}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? 
_.left   : margin.left;\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n        }},\n        style: {get: function(){return style;}, set: function(_){\n            style = _;\n            switch (style) {\n                case 'stack':\n                    chart.offset('zero');\n                    chart.order('default');\n                    break;\n                case 'stream':\n                    chart.offset('wiggle');\n                    chart.order('inside-out');\n                    break;\n                case 'stream-center':\n                    chart.offset('silhouette');\n                    chart.order('inside-out');\n                    break;\n                case 'expand':\n                    chart.offset('expand');\n                    chart.order('default');\n                    break;\n                case 'stack_percent':\n                    chart.offset(chart.d3_stackedOffset_stackPercent);\n                    chart.order('default');\n                    break;\n            }\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            scatter.duration(duration);\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, scatter);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n","\nnv.models.stackedAreaChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var stacked = nv.models.stackedArea()\n        , xAxis = nv.models.axis()\n        , yAxis = nv.models.axis()\n        , legend = nv.models.legend()\n        , controls = nv.models.legend()\n        , interactiveLayer = nv.interactiveGuideline()\n        , tooltip = nv.models.tooltip()\n        , focus = nv.models.focus(nv.models.stackedArea())\n        ;\n\n    var margin = {top: 10, right: 25, bottom: 50, left: 60}\n        , marginTop = null\n        , width = null\n        , height = null\n        , color = nv.utils.defaultColor()\n        , showControls = true\n        , showLegend = true\n        , legendPosition = 'top'\n        , showXAxis = true\n        , showYAxis = true\n        , rightAlignYAxis = false\n        , focusEnable = false\n        , useInteractiveGuideline = false\n        , showTotalInTooltip = true\n        , totalLabel = 'TOTAL'\n        , x //can be accessed via chart.xScale()\n        , y //can be accessed via chart.yScale()\n        , state = nv.utils.state()\n        , defaultState = null\n        , noData = null\n        , dispatch = d3.dispatch('stateChange', 'changeState','renderEnd')\n        , controlWidth = 250\n        , controlOptions = ['Stacked','Stream','Expanded']\n        , controlLabels = {}\n        , duration = 250\n        ;\n\n    state.style = stacked.style();\n    xAxis.orient('bottom').tickPadding(7);\n    yAxis.orient((rightAlignYAxis) ? 'right' : 'left');\n\n    tooltip\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        })\n        .valueFormatter(function(d, i) {\n            return yAxis.tickFormat()(d, i);\n        });\n\n    interactiveLayer.tooltip\n        .headerFormatter(function(d, i) {\n            return xAxis.tickFormat()(d, i);\n        })\n        .valueFormatter(function(d, i) {\n            return d == null ? 
\"N/A\" : yAxis.tickFormat()(d, i);\n        });\n\n    var oldYTickFormat = null,\n        oldValueFormatter = null;\n\n    controls.updateState(false);\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n    var style = stacked.style();\n\n    var stateGetter = function(data) {\n        return function(){\n            return {\n                active: data.map(function(d) { return !d.disabled }),\n                style: stacked.style()\n            };\n        }\n    };\n\n    var stateSetter = function(data) {\n        return function(state) {\n            if (state.style !== undefined)\n                style = state.style;\n            if (state.active !== undefined)\n                data.forEach(function(series,i) {\n                    series.disabled = !state.active[i];\n                });\n        }\n    };\n\n    var percentFormatter = d3.format('%');\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(stacked);\n        if (showXAxis) renderWatch.models(xAxis);\n        if (showYAxis) renderWatch.models(yAxis);\n\n        selection.each(function(data) {\n            var container = d3.select(this),\n                that = this;\n            nv.utils.initSVG(container);\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin),\n                availableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? focus.height() : 0);\n\n            chart.update = function() { container.transition().duration(duration).call(chart); };\n            chart.container = this;\n\n            state\n                .setter(stateSetter(data), chart.update)\n                .getter(stateGetter(data))\n                .update();\n\n            // DEPRECATED set state.disabled\n            state.disabled = data.map(function(d) { return !!d.disabled });\n\n            if (!defaultState) {\n                var key;\n                defaultState = {};\n                for (key in state) {\n                    if (state[key] instanceof Array)\n                        defaultState[key] = state[key].slice(0);\n                    else\n                        defaultState[key] = state[key];\n                }\n            }\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length || !data.filter(function(d) { return d.values.length }).length) {\n                nv.utils.noData(chart, container)\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n            // Setup Scales\n            x = stacked.xScale();\n            y = stacked.yScale();\n\n            // Setup containers and skeleton of chart\n            var wrap = container.selectAll('g.nv-wrap.nv-stackedAreaChart').data([data]);\n            var gEnter = wrap.enter().append('g').attr('class', 'nvd3 nv-wrap nv-stackedAreaChart').append('g');\n            var g = wrap.select('g');\n\n            gEnter.append('g').attr('class', 'nv-legendWrap');\n            gEnter.append('g').attr('class', 'nv-controlsWrap');\n\n            var focusEnter = gEnter.append('g').attr('class', 'nv-focus');\n            focusEnter.append('g').attr('class', 'nv-background').append('rect');\n            focusEnter.append('g').attr('class', 'nv-x nv-axis');\n            
focusEnter.append('g').attr('class', 'nv-y nv-axis');\n            focusEnter.append('g').attr('class', 'nv-stackedWrap');\n            focusEnter.append('g').attr('class', 'nv-interactive');\n\n            // g.select(\"rect\").attr(\"width\",availableWidth).attr(\"height\",availableHeight);\n\n            var contextEnter = gEnter.append('g').attr('class', 'nv-focusWrap');\n\n            // Legend\n            if (!showLegend) {\n                g.select('.nv-legendWrap').selectAll('*').remove();\n            } else {\n                var legendWidth = (showControls && legendPosition === 'top') ? availableWidth - controlWidth : availableWidth;\n\n                legend.width(legendWidth);\n                g.select('.nv-legendWrap').datum(data).call(legend);\n\n                if (legendPosition === 'bottom') {\n                \t// constant from axis.js, plus some margin for better layout\n                \tvar xAxisHeight = (showXAxis ? 12 : 0) + 10;\n                   \tmargin.bottom = Math.max(legend.height() + xAxisHeight, margin.bottom);\n                   \tavailableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? focus.height() : 0);\n                \tvar legendTop = availableHeight + xAxisHeight;\n                    g.select('.nv-legendWrap')\n                        .attr('transform', 'translate(0,' + legendTop +')');\n                } else if (legendPosition === 'top') {\n                    if (!marginTop && margin.top != legend.height()) {\n                        margin.top = legend.height();\n                        availableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? focus.height() : 0);\n                    }\n\n                    g.select('.nv-legendWrap')\n                    \t.attr('transform', 'translate(' + (availableWidth-legendWidth) + ',' + (-margin.top) +')');\n                }\n            }\n\n            // Controls\n            if (!showControls) {\n                 g.select('.nv-controlsWrap').selectAll('*').remove();\n            } else {\n                var controlsData = [\n                    {\n                        key: controlLabels.stacked || 'Stacked',\n                        metaKey: 'Stacked',\n                        disabled: stacked.style() != 'stack',\n                        style: 'stack'\n                    },\n                    {\n                        key: controlLabels.stream || 'Stream',\n                        metaKey: 'Stream',\n                        disabled: stacked.style() != 'stream',\n                        style: 'stream'\n                    },\n                    {\n                        key: controlLabels.expanded || 'Expanded',\n                        metaKey: 'Expanded',\n                        disabled: stacked.style() != 'expand',\n                        style: 'expand'\n                    },\n                    {\n                        key: controlLabels.stack_percent || 'Stack %',\n                        metaKey: 'Stack_Percent',\n                        disabled: stacked.style() != 'stack_percent',\n                        style: 'stack_percent'\n                    }\n                ];\n\n                controlWidth = (controlOptions.length/3) * 260;\n                controlsData = controlsData.filter(function(d) {\n                    return controlOptions.indexOf(d.metaKey) !== -1;\n                });\n\n                controls\n                    .width( controlWidth )\n                    .color(['#444', '#444', 
'#444']);\n\n                g.select('.nv-controlsWrap')\n                    .datum(controlsData)\n                    .call(controls);\n\n                var requiredTop = Math.max(controls.height(), showLegend && (legendPosition === 'top') ? legend.height() : 0);\n\n                if ( margin.top != requiredTop ) {\n                    margin.top = requiredTop;\n                    availableHeight = nv.utils.availableHeight(height, container, margin) - (focusEnable ? focus.height() : 0);\n                }\n\n                g.select('.nv-controlsWrap')\n                    .attr('transform', 'translate(0,' + (-margin.top) +')');\n            }\n\n            wrap.attr('transform', 'translate(' + margin.left + ',' + margin.top + ')');\n\n            if (rightAlignYAxis) {\n                g.select(\".nv-y.nv-axis\")\n                    .attr(\"transform\", \"translate(\" + availableWidth + \",0)\");\n            }\n\n            //Set up interactive layer\n            if (useInteractiveGuideline) {\n                interactiveLayer\n                    .width(availableWidth)\n                    .height(availableHeight)\n                    .margin({left: margin.left, top: margin.top})\n                    .svgContainer(container)\n                    .xScale(x);\n                wrap.select(\".nv-interactive\").call(interactiveLayer);\n            }\n\n            g.select('.nv-focus .nv-background rect')\n                .attr('width', availableWidth)\n                .attr('height', availableHeight);\n\n            stacked\n                .width(availableWidth)\n                .height(availableHeight)\n                .color(data.map(function(d,i) {\n                    return d.color || color(d, i);\n                }).filter(function(d,i) { return !data[i].disabled; }));\n\n            var stackedWrap = g.select('.nv-focus .nv-stackedWrap')\n                .datum(data.filter(function(d) { return !d.disabled; }));\n\n            // Setup Axes\n            if (showXAxis) {\n                xAxis.scale(x)\n                    ._ticks( nv.utils.calcTicksX(availableWidth/100, data) )\n                    .tickSize( -availableHeight, 0);\n            }\n\n            if (showYAxis) {\n                var ticks;\n                if (stacked.offset() === 'wiggle') {\n                    ticks = 0;\n                }\n                else {\n                    ticks = nv.utils.calcTicksY(availableHeight/36, data);\n                }\n                yAxis.scale(y)\n                    ._ticks(ticks)\n                    .tickSize(-availableWidth, 0);\n            }\n\n            //============================================================\n            // Update Axes\n            //============================================================\n            function updateXAxis() {\n                if(showXAxis) {\n                    g.select('.nv-focus .nv-x.nv-axis')\n                        .attr('transform', 'translate(0,' + availableHeight + ')')\n                        .transition()\n                        .duration(duration)\n                        .call(xAxis)\n                        ;\n                }\n            }\n\n            function updateYAxis() {\n                if(showYAxis) {\n                    if (stacked.style() === 'expand' || stacked.style() === 'stack_percent') {\n                        var currentFormat = yAxis.tickFormat();\n\n                        if ( !oldYTickFormat || currentFormat !== percentFormatter )\n                            
oldYTickFormat = currentFormat;\n\n                        //Forces the yAxis to use percentage in 'expand' mode.\n                        yAxis.tickFormat(percentFormatter);\n                    }\n                    else {\n                        if (oldYTickFormat) {\n                            yAxis.tickFormat(oldYTickFormat);\n                            oldYTickFormat = null;\n                        }\n                    }\n\n                    g.select('.nv-focus .nv-y.nv-axis')\n                    .transition().duration(0)\n                    .call(yAxis);\n                }\n            }\n\n            //============================================================\n            // Update Focus\n            //============================================================\n            if(!focusEnable) {\n                stackedWrap.transition().call(stacked);\n                updateXAxis();\n                updateYAxis();\n            } else {\n                focus.width(availableWidth);\n                g.select('.nv-focusWrap')\n                    .attr('transform', 'translate(0,' + ( availableHeight + margin.bottom + focus.margin().top) + ')')\n                    .datum(data.filter(function(d) { return !d.disabled; }))\n                    .call(focus);\n                var extent = focus.brush.empty() ? focus.xDomain() : focus.brush.extent();\n                if(extent !== null){\n                    onBrush(extent);\n                }\n            }\n\n            //============================================================\n            // Event Handling/Dispatching (in chart's scope)\n            //------------------------------------------------------------\n\n            stacked.dispatch.on('areaClick.toggle', function(e) {\n                if (data.filter(function(d) { return !d.disabled }).length === 1)\n                    data.forEach(function(d) {\n                        d.disabled = false;\n                    });\n                else\n                    data.forEach(function(d,i) {\n                        d.disabled = (i != e.seriesIndex);\n                    });\n\n                state.disabled = data.map(function(d) { return !!d.disabled });\n                dispatch.stateChange(state);\n\n                chart.update();\n            });\n\n            legend.dispatch.on('stateChange', function(newState) {\n                for (var key in newState)\n                    state[key] = newState[key];\n                dispatch.stateChange(state);\n                chart.update();\n            });\n\n            controls.dispatch.on('legendClick', function(d,i) {\n                if (!d.disabled) return;\n\n                controlsData = controlsData.map(function(s) {\n                    s.disabled = true;\n                    return s;\n                });\n                d.disabled = false;\n\n                stacked.style(d.style);\n\n\n                state.style = stacked.style();\n                dispatch.stateChange(state);\n\n                chart.update();\n            });\n\n            interactiveLayer.dispatch.on('elementMousemove', function(e) {\n                stacked.clearHighlights();\n                var singlePoint, pointIndex, pointXLocation, allData = [], valueSum = 0, allNullValues = true;\n                data\n                    .filter(function(series, i) {\n                        series.seriesIndex = i;\n                        return !series.disabled;\n                    })\n                    .forEach(function(series,i) 
{\n                        pointIndex = nv.interactiveBisect(series.values, e.pointXValue, chart.x());\n                        var point = series.values[pointIndex];\n                        var pointYValue = chart.y()(point, pointIndex);\n                        if (pointYValue != null) {\n                            stacked.highlightPoint(i, pointIndex, true);\n                        }\n                        if (typeof point === 'undefined') return;\n                        if (typeof singlePoint === 'undefined') singlePoint = point;\n                        if (typeof pointXLocation === 'undefined') pointXLocation = chart.xScale()(chart.x()(point,pointIndex));\n\n                        //If we are in 'expand' mode, use the stacked percent value instead of raw value.\n                        var tooltipValue = (stacked.style() == 'expand') ? point.display.y : chart.y()(point,pointIndex);\n                        allData.push({\n                            key: series.key,\n                            value: tooltipValue,\n                            color: color(series,series.seriesIndex),\n                            point: point\n                        });\n\n                        if (showTotalInTooltip && stacked.style() != 'expand' && tooltipValue != null) {\n                          valueSum += tooltipValue;\n                          allNullValues = false;\n                        };\n                    });\n\n                allData.reverse();\n\n                //Highlight the tooltip entry based on which stack the mouse is closest to.\n                if (allData.length > 2) {\n                    var yValue = chart.yScale().invert(e.mouseY);\n                    var yDistMax = Infinity, indexToHighlight = null;\n                    allData.forEach(function(series,i) {\n\n                        //To handle situation where the stacked area chart is negative, we need to use absolute values\n                        //when checking if the mouse Y value is within the stack area.\n                        yValue = Math.abs(yValue);\n                        var stackedY0 = Math.abs(series.point.display.y0);\n                        var stackedY = Math.abs(series.point.display.y);\n                        if ( yValue >= stackedY0 && yValue <= (stackedY + stackedY0))\n                        {\n                            indexToHighlight = i;\n                            return;\n                        }\n                    });\n                    if (indexToHighlight != null)\n                        allData[indexToHighlight].highlight = true;\n                }\n\n                //If we are not in 'expand' mode, add a 'Total' row to the tooltip.\n                if (showTotalInTooltip && stacked.style() != 'expand' && allData.length >= 2 && !allNullValues) {\n                    allData.push({\n                        key: totalLabel,\n                        value: valueSum,\n                        total: true\n                    });\n                }\n\n                var xValue = chart.x()(singlePoint,pointIndex);\n\n                var valueFormatter = interactiveLayer.tooltip.valueFormatter();\n                // Keeps track of the tooltip valueFormatter if the chart changes to expanded view\n                if (stacked.style() === 'expand' || stacked.style() === 'stack_percent') {\n                    if ( !oldValueFormatter ) {\n                        oldValueFormatter = valueFormatter;\n                    }\n                    //Forces the tooltip to use 
percentage in 'expand' mode.\n                    valueFormatter = d3.format(\".1%\");\n                }\n                else {\n                    if (oldValueFormatter) {\n                        valueFormatter = oldValueFormatter;\n                        oldValueFormatter = null;\n                    }\n                }\n\n                interactiveLayer.tooltip\n                    .valueFormatter(valueFormatter)\n                    .data(\n                    {\n                        value: xValue,\n                        series: allData\n                    }\n                )();\n\n                interactiveLayer.renderGuideLine(pointXLocation);\n\n            });\n\n            interactiveLayer.dispatch.on(\"elementMouseout\",function(e) {\n                stacked.clearHighlights();\n            });\n\n            /* Update `main' graph on brush update. */\n            focus.dispatch.on(\"onBrush\", function(extent) {\n                onBrush(extent);\n            });\n\n            // Update chart from a state object passed to event handler\n            dispatch.on('changeState', function(e) {\n\n                if (typeof e.disabled !== 'undefined' && data.length === e.disabled.length) {\n                    data.forEach(function(series,i) {\n                        series.disabled = e.disabled[i];\n                    });\n\n                    state.disabled = e.disabled;\n                }\n\n                if (typeof e.style !== 'undefined') {\n                    stacked.style(e.style);\n                    style = e.style;\n                }\n\n                chart.update();\n            });\n\n            //============================================================\n            // Functions\n            //------------------------------------------------------------\n\n            function onBrush(extent) {\n                // Update Main (Focus)\n                var stackedWrap = g.select('.nv-focus .nv-stackedWrap')\n                    .datum(\n                    data.filter(function(d) { return !d.disabled; })\n                        .map(function(d,i) {\n                            return {\n                                key: d.key,\n                                area: d.area,\n                                classed: d.classed,\n                                values: d.values.filter(function(d,i) {\n                                    return stacked.x()(d,i) >= extent[0] && stacked.x()(d,i) <= extent[1];\n                                }),\n                                disableTooltip: d.disableTooltip\n                            };\n                        })\n                );\n                stackedWrap.transition().duration(duration).call(stacked);\n\n                // Update Main (Focus) Axes\n                updateXAxis();\n                updateYAxis();\n            }\n\n        });\n\n        renderWatch.renderEnd('stacked Area chart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    stacked.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt.point['x'] = stacked.x()(evt.point);\n        evt.point['y'] = stacked.y()(evt.point);\n        tooltip.data(evt).hidden(false);\n    });\n\n    stacked.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true)\n    });\n    
//============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.stacked = stacked;\n    chart.legend = legend;\n    chart.controls = controls;\n    chart.xAxis = xAxis;\n    chart.x2Axis = focus.xAxis;\n    chart.yAxis = yAxis;\n    chart.y2Axis = focus.yAxis;\n    chart.interactiveLayer = interactiveLayer;\n    chart.tooltip = tooltip;\n    chart.focus = focus;\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        showLegend: {get: function(){return showLegend;}, set: function(_){showLegend=_;}},\n        legendPosition: {get: function(){return legendPosition;}, set: function(_){legendPosition=_;}},\n        showXAxis:      {get: function(){return showXAxis;}, set: function(_){showXAxis=_;}},\n        showYAxis:    {get: function(){return showYAxis;}, set: function(_){showYAxis=_;}},\n        defaultState:    {get: function(){return defaultState;}, set: function(_){defaultState=_;}},\n        noData:    {get: function(){return noData;}, set: function(_){noData=_;}},\n        showControls:    {get: function(){return showControls;}, set: function(_){showControls=_;}},\n        controlLabels:    {get: function(){return controlLabels;}, set: function(_){controlLabels=_;}},\n        controlOptions:    {get: function(){return controlOptions;}, set: function(_){controlOptions=_;}},\n        showTotalInTooltip:      {get: function(){return showTotalInTooltip;}, set: function(_){showTotalInTooltip=_;}},\n        totalLabel:      {get: function(){return totalLabel;}, set: function(_){totalLabel=_;}},\n        focusEnable:    {get: function(){return focusEnable;}, set: function(_){focusEnable=_;}},\n        focusHeight:     {get: function(){return focus.height();}, set: function(_){focus.height(_);}},\n        brushExtent: {get: function(){return focus.brushExtent();}, set: function(_){focus.brushExtent(_);}},\n\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            if (_.top !== undefined) {\n                margin.top = _.top;\n                marginTop = _.top;\n            }\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n        }},\n        focusMargin: {get: function(){return focus.margin}, set: function(_){\n            focus.margin.top    = _.top    !== undefined ? _.top    : focus.margin.top;\n            focus.margin.right  = _.right  !== undefined ? _.right  : focus.margin.right;\n            focus.margin.bottom = _.bottom !== undefined ? _.bottom : focus.margin.bottom;\n            focus.margin.left   = _.left   !== undefined ? 
_.left   : focus.margin.left;\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            stacked.duration(duration);\n            xAxis.duration(duration);\n            yAxis.duration(duration);\n        }},\n        color:  {get: function(){return color;}, set: function(_){\n            color = nv.utils.getColor(_);\n            legend.color(color);\n            stacked.color(color);\n            focus.color(color);\n        }},\n        x: {get: function(){return stacked.x();}, set: function(_){\n            stacked.x(_);\n            focus.x(_);\n        }},\n        y: {get: function(){return stacked.y();}, set: function(_){\n            stacked.y(_);\n            focus.y(_);\n        }},\n        rightAlignYAxis: {get: function(){return rightAlignYAxis;}, set: function(_){\n            rightAlignYAxis = _;\n            yAxis.orient( rightAlignYAxis ? 'right' : 'left');\n        }},\n        useInteractiveGuideline: {get: function(){return useInteractiveGuideline;}, set: function(_){\n            useInteractiveGuideline = !!_;\n            chart.interactive(!_);\n            chart.useVoronoi(!_);\n            stacked.scatter.interactive(!_);\n        }}\n    });\n\n    nv.utils.inheritOptions(chart, stacked);\n    nv.utils.initOptions(chart);\n\n    return chart;\n};\n\nnv.models.stackedAreaWithFocusChart = function() {\n  return nv.models.stackedAreaChart()\n    .margin({ bottom: 30 })\n    .focusEnable( true );\n};\n","// based on http://bl.ocks.org/kerryrodden/477c1bfb081b783f80ad\nnv.models.sunburst = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var margin = {top: 0, right: 0, bottom: 0, left: 0}\n        , width = 600\n        , height = 600\n        , mode = \"count\"\n        , modes = {count: function(d) { return 1; }, value: function(d) { return d.value || d.size }, size: function(d) { return d.value || d.size }}\n        , id = Math.floor(Math.random() * 10000) //Create semi-unique ID in case user doesn't select one\n        , container = null\n        , color = nv.utils.defaultColor()\n        , showLabels = false\n        , labelFormat = function(d){if(mode === 'count'){return d.name + ' #' + d.value}else{return d.name + ' ' + (d.value || d.size)}}\n        , labelThreshold = 0.02\n        , sort = function(d1, d2){return d1.name > d2.name;}\n        , key = function(d,i){return d.name;}\n        , groupColorByParent = true\n        , duration = 500\n        , dispatch = d3.dispatch('chartClick', 'elementClick', 'elementDblClick', 'elementMousemove', 'elementMouseover', 'elementMouseout', 'renderEnd');\n\n    //============================================================\n    // aux functions and setup\n    //------------------------------------------------------------\n\n    var x = d3.scale.linear().range([0, 2 * Math.PI]);\n    var y = d3.scale.sqrt();\n\n    var partition = d3.layout.partition().sort(sort);\n\n    var node, availableWidth, availableHeight, radius;\n    var prevPositions = {};\n\n    var arc = d3.svg.arc()\n        .startAngle(function(d) {return Math.max(0, Math.min(2 * Math.PI, x(d.x))) })\n        .endAngle(function(d) {return Math.max(0, Math.min(2 * Math.PI, x(d.x + d.dx))) })\n        .innerRadius(function(d) {return Math.max(0, y(d.y)) })\n        
.outerRadius(function(d) {return Math.max(0, y(d.y + d.dy)) });\n\n    function rotationToAvoidUpsideDown(d) {\n        var centerAngle = computeCenterAngle(d);\n        if(centerAngle > 90){\n            return 180;\n        }\n        else {\n            return 0;\n        }\n    }\n\n    function computeCenterAngle(d) {\n        var startAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x)));\n        var endAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x + d.dx)));\n        var centerAngle = (((startAngle + endAngle) / 2) * (180 / Math.PI)) - 90;\n        return centerAngle;\n    }\n\n    function computeNodePercentage(d) {\n        var startAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x)));\n        var endAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x + d.dx)));\n        return (endAngle - startAngle) / (2 * Math.PI);\n    }\n\n    function labelThresholdMatched(d) {\n        var startAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x)));\n        var endAngle = Math.max(0, Math.min(2 * Math.PI, x(d.x + d.dx)));\n\n        var size = endAngle - startAngle;\n        return size > labelThreshold;\n    }\n\n    // When zooming: interpolate the scales.\n    function arcTweenZoom(e,i) {\n        var xd = d3.interpolate(x.domain(), [node.x, node.x + node.dx]),\n        yd = d3.interpolate(y.domain(), [node.y, 1]),\n        yr = d3.interpolate(y.range(), [node.y ? 20 : 0, radius]);\n\n        if (i === 0) {\n            return function() {return arc(e);}\n        }\n        else {\n            return function (t) {\n                x.domain(xd(t));\n                y.domain(yd(t)).range(yr(t));\n                return arc(e);\n            }\n        };\n    }\n\n    function arcTweenUpdate(d) {\n        var ipo = d3.interpolate({x: d.x0, dx: d.dx0, y: d.y0, dy: d.dy0}, d);\n\n        return function (t) {\n            var b = ipo(t);\n\n            d.x0 = b.x;\n            d.dx0 = b.dx;\n            d.y0 = b.y;\n            d.dy0 = b.dy;\n\n            return arc(b);\n        };\n    }\n\n    function updatePrevPosition(node) {\n        var k = key(node);\n        if(! 
prevPositions[k]) prevPositions[k] = {};\n        var pP = prevPositions[k];\n        pP.dx = node.dx;\n        pP.x = node.x;\n        pP.dy = node.dy;\n        pP.y = node.y;\n    }\n\n    function storeRetrievePrevPositions(nodes) {\n        nodes.forEach(function(n){\n            var k = key(n);\n            var pP = prevPositions[k];\n            //console.log(k,n,pP);\n            if( pP ){\n                n.dx0 = pP.dx;\n                n.x0 = pP.x;\n                n.dy0 = pP.dy;\n                n.y0 = pP.y;\n            }\n            else {\n                n.dx0 = n.dx;\n                n.x0 = n.x;\n                n.dy0 = n.dy;\n                n.y0 = n.y;\n            }\n            updatePrevPosition(n);\n        });\n    }\n\n    function zoomClick(d) {\n        var labels = container.selectAll('text')\n        var path = container.selectAll('path')\n\n        // fade out all text elements\n        labels.transition().attr(\"opacity\",0);\n\n        // to allow reference to the new center node\n        node = d;\n\n        path.transition()\n            .duration(duration)\n            .attrTween(\"d\", arcTweenZoom)\n            .each('end', function(e) {\n                // partially taken from here: http://bl.ocks.org/metmajer/5480307\n                // check if the animated element's data e lies within the visible angle span given in d\n                if(e.x >= d.x && e.x < (d.x + d.dx) ){\n                    if(e.depth >= d.depth){\n                        // get a selection of the associated text element\n                        var parentNode = d3.select(this.parentNode);\n                        var arcText = parentNode.select('text');\n\n                        // fade in the text element and recalculate positions\n                        arcText.transition().duration(duration)\n                        .text( function(e){return labelFormat(e) })\n                        .attr(\"opacity\", function(d){\n                            if(labelThresholdMatched(d)) {\n                                return 1;\n                            }\n                            else {\n                                return 0;\n                            }\n                        })\n                        .attr(\"transform\", function() {\n                            var width = this.getBBox().width;\n                            if(e.depth === 0)\n                            return \"translate(\" + (width / 2 * - 1) + \",0)\";\n                            else if(e.depth === d.depth){\n                                return \"translate(\" + (y(e.y) + 5) + \",0)\";\n                            }\n                            else {\n                                var centerAngle = computeCenterAngle(e);\n                                var rotation = rotationToAvoidUpsideDown(e);\n                                if (rotation === 0) {\n                                    return 'rotate('+ centerAngle +')translate(' + (y(e.y) + 5) + ',0)';\n                                }\n                                else {\n                                    return 'rotate('+ centerAngle +')translate(' + (y(e.y) + width + 5) + ',0)rotate(' + rotation + ')';\n                                }\n                            }\n                        });\n                    }\n                }\n            })\n    }\n\n    //============================================================\n    // chart function\n    //------------------------------------------------------------\n    var 
renderWatch = nv.utils.renderWatch(dispatch);\n\n    function chart(selection) {\n        renderWatch.reset();\n\n        selection.each(function(data) {\n            container = d3.select(this);\n            availableWidth = nv.utils.availableWidth(width, container, margin);\n            availableHeight = nv.utils.availableHeight(height, container, margin);\n            radius = Math.min(availableWidth, availableHeight) / 2;\n\n            y.range([0, radius]);\n\n            // Setup containers and skeleton of chart\n            var wrap = container.select('g.nvd3.nv-wrap.nv-sunburst');\n            if( !wrap[0][0] ) {\n                wrap = container.append('g')\n                    .attr('class', 'nvd3 nv-wrap nv-sunburst nv-chart-' + id)\n                    .attr('transform', 'translate(' + ((availableWidth / 2) + margin.left + margin.right) + ',' + ((availableHeight / 2) + margin.top + margin.bottom) + ')');\n            } else {\n                wrap.attr('transform', 'translate(' + ((availableWidth / 2) + margin.left + margin.right) + ',' + ((availableHeight / 2) + margin.top + margin.bottom) + ')');\n            }\n\n            container.on('click', function (d, i) {\n                dispatch.chartClick({\n                    data: d,\n                    index: i,\n                    pos: d3.event,\n                    id: id\n                });\n            });\n\n            partition.value(modes[mode] || modes[\"count\"]);\n\n            //reverse the drawing order so that the labels of inner\n            //arcs are drawn on top of the outer arcs.\n            var nodes = partition.nodes(data[0]).reverse()\n\n            storeRetrievePrevPositions(nodes);\n            var cG = wrap.selectAll('.arc-container').data(nodes, key)\n\n            //handle new datapoints\n            var cGE = cG.enter()\n                .append(\"g\")\n                .attr(\"class\",'arc-container')\n\n            cGE.append(\"path\")\n                .attr(\"d\", arc)\n                .style(\"fill\", function (d) {\n                    if (d.color) {\n                        return d.color;\n                    }\n                    else if (groupColorByParent) {\n                        return color((d.children ? 
d : d.parent).name);\n                    }\n                    else {\n                        return color(d.name);\n                    }\n                })\n                .style(\"stroke\", \"#FFF\")\n                .on(\"click\", function(d,i){\n                    zoomClick(d);\n                    dispatch.elementClick({\n                        data: d,\n                        index: i\n                    })\n                })\n                .on('mouseover', function(d,i){\n                    d3.select(this).classed('hover', true).style('opacity', 0.8);\n                    dispatch.elementMouseover({\n                        data: d,\n                        color: d3.select(this).style(\"fill\"),\n                        percent: computeNodePercentage(d)\n                    });\n                })\n                .on('mouseout', function(d,i){\n                    d3.select(this).classed('hover', false).style('opacity', 1);\n                    dispatch.elementMouseout({\n                        data: d\n                    });\n                })\n                .on('mousemove', function(d,i){\n                    dispatch.elementMousemove({\n                        data: d\n                    });\n                });\n\n            ///Iterating via each and selecting based on the this\n            ///makes it work ... a cG.selectAll('path') doesn't.\n            ///Without iteration the data (in the element) didn't update.\n            cG.each(function(d){\n                d3.select(this).select('path')\n                    .transition()\n                    .duration(duration)\n                    .attrTween('d', arcTweenUpdate);\n            });\n\n            if(showLabels){\n                //remove labels first and add them back\n                cG.selectAll('text').remove();\n\n                //this way labels are on top of newly added arcs\n                cG.append('text')\n                    .text( function(e){ return labelFormat(e)})\n                    .transition()\n                    .duration(duration)\n                    .attr(\"opacity\", function(d){\n                        if(labelThresholdMatched(d)) {\n                            return 1;\n                        }\n                        else {\n                            return 0;\n                        }\n                    })\n                    .attr(\"transform\", function(d) {\n                        var width = this.getBBox().width;\n                        if(d.depth === 0){\n                            return \"rotate(0)translate(\" + (width / 2 * -1) + \",0)\";\n                        }\n                        else {\n                            var centerAngle = computeCenterAngle(d);\n                            var rotation = rotationToAvoidUpsideDown(d);\n                            if (rotation === 0) {\n                                return 'rotate('+ centerAngle +')translate(' + (y(d.y) + 5) + ',0)';\n                            }\n                            else {\n                                return 'rotate('+ centerAngle +')translate(' + (y(d.y) + width + 5) + ',0)rotate(' + rotation + ')';\n                            }\n                        }\n                    });\n            }\n\n            //zoom out to the center when the data is updated.\n            zoomClick(nodes[nodes.length - 1])\n\n\n            //remove unmatched elements ...\n            cG.exit()\n                .transition()\n                .duration(duration)\n                
.attr('opacity',0)\n                .each('end',function(d){\n                    var k = key(d);\n                    prevPositions[k] = undefined;\n                })\n                .remove();\n        });\n\n\n        renderWatch.renderEnd('sunburst immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    chart.dispatch = dispatch;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        width:      {get: function(){return width;}, set: function(_){width=_;}},\n        height:     {get: function(){return height;}, set: function(_){height=_;}},\n        mode:       {get: function(){return mode;}, set: function(_){mode=_;}},\n        id:         {get: function(){return id;}, set: function(_){id=_;}},\n        duration:   {get: function(){return duration;}, set: function(_){duration=_;}},\n        groupColorByParent: {get: function(){return groupColorByParent;}, set: function(_){groupColorByParent=!!_;}},\n        showLabels: {get: function(){return showLabels;}, set: function(_){showLabels=!!_}},\n        labelFormat: {get: function(){return labelFormat;}, set: function(_){labelFormat=_}},\n        labelThreshold: {get: function(){return labelThreshold;}, set: function(_){labelThreshold=_}},\n        sort: {get: function(){return sort;}, set: function(_){sort=_}},\n        key: {get: function(){return key;}, set: function(_){key=_}},\n        // options that require extra logic in the setter\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    != undefined ? _.top    : margin.top;\n            margin.right  = _.right  != undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom != undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   != undefined ? 
_.left   : margin.left;\n        }},\n        color: {get: function(){return color;}, set: function(_){\n            color=nv.utils.getColor(_);\n        }}\n    });\n\n    nv.utils.initOptions(chart);\n    return chart;\n};\n","nv.models.sunburstChart = function() {\n    \"use strict\";\n\n    //============================================================\n    // Public Variables with Default Settings\n    //------------------------------------------------------------\n\n    var sunburst = nv.models.sunburst();\n    var tooltip = nv.models.tooltip();\n\n    var margin = {top: 30, right: 20, bottom: 20, left: 20}\n        , width = null\n        , height = null\n        , color = nv.utils.defaultColor()\n        , showTooltipPercent = false\n        , id = Math.round(Math.random() * 100000)\n        , defaultState = null\n        , noData = null\n        , duration = 250\n        , dispatch = d3.dispatch('stateChange', 'changeState','renderEnd');\n\n\n    //============================================================\n    // Private Variables\n    //------------------------------------------------------------\n\n    var renderWatch = nv.utils.renderWatch(dispatch);\n\n    tooltip\n        .duration(0)\n        .headerEnabled(false)\n        .valueFormatter(function(d){return d;});\n\n    //============================================================\n    // Chart function\n    //------------------------------------------------------------\n\n    function chart(selection) {\n        renderWatch.reset();\n        renderWatch.models(sunburst);\n\n        selection.each(function(data) {\n            var container = d3.select(this);\n\n            nv.utils.initSVG(container);\n\n            var availableWidth = nv.utils.availableWidth(width, container, margin);\n            var availableHeight = nv.utils.availableHeight(height, container, margin);\n\n            chart.update = function() {\n                if (duration === 0) {\n                    container.call(chart);\n                } else {\n                    container.transition().duration(duration).call(chart);\n                }\n            };\n            chart.container = container;\n\n            // Display No Data message if there's nothing to show.\n            if (!data || !data.length) {\n                nv.utils.noData(chart, container);\n                return chart;\n            } else {\n                container.selectAll('.nv-noData').remove();\n            }\n\n            sunburst.width(availableWidth).height(availableHeight).margin(margin);\n            container.call(sunburst);\n        });\n\n        renderWatch.renderEnd('sunburstChart immediate');\n        return chart;\n    }\n\n    //============================================================\n    // Event Handling/Dispatching (out of chart's scope)\n    //------------------------------------------------------------\n\n    sunburst.dispatch.on('elementMouseover.tooltip', function(evt) {\n        evt.series = {\n            key: evt.data.name,\n            value: (evt.data.value || evt.data.size),\n            color: evt.color,\n            percent: evt.percent\n        };\n        if (!showTooltipPercent) {\n            delete evt.percent;\n            delete evt.series.percent;\n        }\n        tooltip.data(evt).hidden(false);\n    });\n\n    sunburst.dispatch.on('elementMouseout.tooltip', function(evt) {\n        tooltip.hidden(true);\n    });\n\n    sunburst.dispatch.on('elementMousemove.tooltip', function(evt) {\n        tooltip();\n    });\n\n    
//============================================================\n    // Expose Public Variables\n    //------------------------------------------------------------\n\n    // expose chart's sub-components\n    chart.dispatch = dispatch;\n    chart.sunburst = sunburst;\n    chart.tooltip = tooltip;\n    chart.options = nv.utils.optionsFunc.bind(chart);\n\n    // use Object get/set functionality to map between vars and chart functions\n    chart._options = Object.create({}, {\n        // simple options, just get/set the necessary values\n        noData:             {get: function(){return noData;},               set: function(_){noData=_;}},\n        defaultState:       {get: function(){return defaultState;},         set: function(_){defaultState=_;}},\n        showTooltipPercent: {get: function(){return showTooltipPercent;},   set: function(_){showTooltipPercent=_;}},\n\n        // options that require extra logic in the setter\n        color: {get: function(){return color;}, set: function(_){\n            color = _;\n            sunburst.color(color);\n        }},\n        duration: {get: function(){return duration;}, set: function(_){\n            duration = _;\n            renderWatch.reset(duration);\n            sunburst.duration(duration);\n        }},\n        margin: {get: function(){return margin;}, set: function(_){\n            margin.top    = _.top    !== undefined ? _.top    : margin.top;\n            margin.right  = _.right  !== undefined ? _.right  : margin.right;\n            margin.bottom = _.bottom !== undefined ? _.bottom : margin.bottom;\n            margin.left   = _.left   !== undefined ? _.left   : margin.left;\n            sunburst.margin(margin);\n        }}\n    });\n    nv.utils.inheritOptions(chart, sunburst);\n    nv.utils.initOptions(chart);\n    return chart;\n\n};\n"]}
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.css b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.css
new file mode 100644
index 0000000..271ac74
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.css
@@ -0,0 +1,60 @@
+/**
+ *   Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements.  See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+*/
+body {
+    padding: 40px;
+    padding-top: 60px;
+}
+.starter-template {
+    padding: 40px 15px;
+    text-align: center;
+}
+
+
+.btn {
+    border: 0 none;
+    font-weight: 700;
+    letter-spacing: 1px;
+    text-transform: uppercase;
+}
+
+.btn:focus, .btn:active:focus, .btn.active:focus {
+    outline: 0 none;
+}
+
+.table-striped > tbody > tr:nth-child(2n+1).selectedtag > td:hover {
+    background-color: #3276b1;
+}
+.table-striped > tbody > tr:nth-child(2n+1).selectedtag > td {
+    background-color: #3276b1;
+}
+.tagPanel tr.selectedtag td {
+    background-color: #3276b1;
+}
+.top-buffer { margin-top:4px; }
+
+
+.sortorder:after {
+    content: '\25b2';   /* BLACK UP-POINTING TRIANGLE */
+}
+.sortorder.reverse:after {
+    content: '\25bc';   /* BLACK DOWN-POINTING TRIANGLE */
+}
+
+.wrap-table{
+    word-wrap: break-word;
+    table-layout: fixed;
+}
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.js b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.js
new file mode 100644
index 0000000..411438a
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/ozone.js
@@ -0,0 +1,387 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+  "use strict";
+
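+  // JMX beans carry bookkeeping keys (bean name, modelerType, Angular's
+  // $$hashKey and the tag.* attributes) that should not be shown as metrics.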
+  var isIgnoredJmxKeys = function(key) {
+    return key == 'name' || key == 'modelerType' || key == "$$hashKey" ||
+      key.match(/tag.*/);
+  };
+  angular.module('ozone', ['nvd3', 'ngRoute']);
+  angular.module('ozone').config(function($routeProvider) {
+    $routeProvider
+      .when("/", {
+        templateUrl: "main.html"
+      })
+      .when("/metrics/rpc", {
+        template: "<rpc-metrics></rpc-metrics>"
+      })
+      .when("/config", {
+        template: "<config></config>"
+      })
+  });
+  angular.module('ozone').component('overview', {
+    templateUrl: 'static/templates/overview.html',
+    transclude: true,
+    controller: function($http) {
+      var ctrl = this;
+      $http.get("jmx?qry=Hadoop:service=*,name=*,component=ServerRuntime")
+        .then(function(result) {
+          ctrl.jmx = result.data.beans[0]
+        })
+    }
+  });
+  angular.module('ozone').component('jvmParameters', {
+    templateUrl: 'static/templates/jvm.html',
+    controller: function($http) {
+      var ctrl = this;
+      $http.get("jmx?qry=java.lang:type=Runtime")
+        .then(function(result) {
+          ctrl.jmx = result.data.beans[0];
+
+          //convert array to a map
+          var systemProperties = {};
+          for (var idx in ctrl.jmx.SystemProperties) {
+            var item = ctrl.jmx.SystemProperties[idx];
+            systemProperties[item.key.replace(/\./g, "_")] = item.value;
+          }
+          ctrl.jmx.SystemProperties = systemProperties;
+        })
+    }
+  });
+
+  angular.module('ozone').component('rpcMetrics', {
+    template: '<h1>RPC metrics</h1><tabs>' +
+      '<pane ng-repeat="metric in $ctrl.metrics" ' +
+      'title="{{metric[\'tag.serverName\']}} ({{metric[\'tag.port\']}})">' +
+      '<rpc-metric jmxdata="metric"></rpc-metric></pane>' +
+      '</tabs>',
+    controller: function($http) {
+      var ctrl = this;
+      $http.get("jmx?qry=Hadoop:service=*,name=RpcActivityForPort*")
+        .then(function(result) {
+          ctrl.metrics = result.data.beans;
+        })
+    }
+  });
+  angular.module('ozone').component('rpcMetric', {
+    bindings: {
+      jmxdata: '<'
+    },
+    templateUrl: 'static/templates/rpc-metrics.html',
+    controller: function() {
+      var ctrl = this;
+
+
+      ctrl.percentileGraphOptions = {
+        chart: {
+          type: 'discreteBarChart',
+          height: 450,
+          margin: {
+            top: 20,
+            right: 20,
+            bottom: 50,
+            left: 55
+          },
+          x: function(d) {
+            return d.label;
+          },
+          y: function(d) {
+            return d.value;
+          },
+          showValues: true,
+          valueFormat: function(d) {
+            return d3.format(',.1f')(d);
+          },
+          duration: 500,
+          xAxis: {
+            axisLabel: 'Percentage'
+          },
+          yAxis: {
+            axisLabel: 'Latency (ms)',
+            axisLabelDistance: -10
+          }
+        }
+      };
+
+      ctrl.$onChanges = function(data) {
+        var groupedMetrics = {}
+
+        var createPercentageMetrics = function(metricName, window) {
+          groupedMetrics.percentiles = groupedMetrics['percentiles'] || {}
+          groupedMetrics.percentiles[window] = groupedMetrics.percentiles[window] || {};
+          groupedMetrics.percentiles[window][metricName] = groupedMetrics.percentiles[window][metricName] || {
+            graphdata: [{
+              key: window,
+              values: []
+            }],
+            numOps: 0
+          };
+
+        };
+        var metrics = ctrl.jmxdata;
+        for (var key in metrics) {
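+          // Classify each JMX key by its naming pattern:
+          // <op>Time<window><pct>PercentileLatency, <op>Time<window>NumOps,
+          // <op>Success/<op>Failures and <op>TimeNumOps/<op>TimeAvgTime;
+          // anything else (except ignored keys) is listed under "others".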
+          var percentile = key.match(/(.*Time)(\d+s)(\d+th)PercentileLatency/);
+          var percentileNumOps = key.match(/(.*Time)(\d+s)NumOps/);
+          var successFailures = key.match(/(.*)(Success|Failures)/);
+          var numAverages = key.match(/(.*Time)(NumOps|AvgTime)/);
+          if (percentile) {
+            var metricName = percentile[1];
+            var window = percentile[2];
+            var percentage = percentile[3]
+            createPercentageMetrics(metricName, window);
+
+
+            groupedMetrics.percentiles[window][metricName].graphdata[0]
+              .values.push({
+                label: percentage,
+                value: metrics[key]
+              })
+          } else if (successFailures) {
+            var metricName = successFailures[1];
+            groupedMetrics.successfailures = groupedMetrics['successfailures'] || {}
+            groupedMetrics.successfailures[metricName] = groupedMetrics.successfailures[metricName] || {
+              success: 0,
+              failures: 0
+            };
+            if (successFailures[2] == 'Success') {
+              groupedMetrics.successfailures[metricName].success = metrics[key];
+            } else {
+              groupedMetrics.successfailures[metricName].failures = metrics[key];
+            }
+
+          } else if (numAverages) {
+            var metricName = numAverages[1];
+            groupedMetrics.numavgs = groupedMetrics['numavgs'] || {}
+            groupedMetrics.numavgs[metricName] = groupedMetrics.numavgs[metricName] || {
+              numOps: 0,
+              avgTime: 0
+            };
+            if (numAverages[2] == 'NumOps') {
+              groupedMetrics.numavgs[metricName].numOps = metrics[key];
+            } else {
+              groupedMetrics.numavgs[metricName].avgTime = metrics[key];
+            }
+
+          } else if (percentileNumOps) {
+            var metricName = percentileNumOps[1];
+            var window = percentileNumOps[2];
+            createPercentageMetrics(metricName, window);
+            groupedMetrics.percentiles[window][metricName].numOps = metrics[key];
+          } else if (isIgnoredJmxKeys(key)) {
+            //ignore
+          } else {
+            groupedMetrics.others = groupedMetrics.others || [];
+            groupedMetrics.others.push({
+              'key': key,
+              'value': metrics[key]
+            });
+          }
+
+        }
+        ctrl.metrics = groupedMetrics;
+      };
+
+    }
+  });
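+  // Minimal tab widget: each transcluded <pane> registers itself with the
+  // parent <tabs> controller via addPane(), and only the selected pane is
+  // rendered (ng-if="$ctrl.selected" in the pane template).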
+  angular.module('ozone')
+    .component('tabs', {
+      transclude: true,
+      controller: function($scope) {
+        var ctrl = this;
+        var panes = this.panes = [];
+        this.select = function(pane) {
+          angular.forEach(panes, function(pane) {
+            pane.selected = false;
+          });
+          pane.selected = true;
+        };
+        this.addPane = function(pane) {
+          if (panes.length === 0) {
+            this.select(pane);
+          }
+          panes.push(pane);
+        };
+        this.click = function(pane) {
+          ctrl.select(pane);
+        }
+      },
+      template: '<div class="nav navtabs"><div class="row"><ul' +
+        ' class="nav nav-pills">' +
+        '<li ng-repeat="pane in $ctrl.panes" ng-class="{active:pane.selected}">' +
+        '<a href="" ng-click="$ctrl.click(pane)">{{pane.title}}</a> ' +
+        '</li> </ul></div><br/><div class="tab-content" ng-transclude></div> </div>'
+    })
+    .component('pane', {
+      transclude: true,
+      require: {
+        tabsCtrl: '^tabs'
+      },
+      bindings: {
+        title: '@'
+      },
+      controller: function() {
+        this.$onInit = function() {
+          this.tabsCtrl.addPane(this);
+        };
+      },
+      template: '<div class="tab-pane" ng-if="$ctrl.selected" ng-transclude></div>'
+    });
+
+  angular.module('ozone').component('navmenu', {
+    bindings: {
+      metrics: '<'
+    },
+    templateUrl: 'static/templates/menu.html',
+    controller: function($http) {
+      var ctrl = this;
+      ctrl.docs = false;
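+      // Show the Documentation menu item only if the bundled docs are
+      // actually deployed (probed with a HEAD request on docs/index.html).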
+      $http.head("docs/index.html")
+        .then(function(result) {
+          ctrl.docs = true;
+        }, function() {
+          ctrl.docs = false;
+        });
+    }
+  });
+
+  angular.module('ozone').component('config', {
+    templateUrl: 'static/templates/config.html',
+    controller: function($scope, $http) {
+      var ctrl = this;
+      ctrl.selectedTags = [];
+      ctrl.configArray = [];
+
+      $http.get("conf?cmd=getOzoneTags")
+        .then(function(response) {
+          ctrl.tags = response.data;
+          var excludedTags = ['CBLOCK', 'KSM', 'SCM'];
+          for (var i = 0; i < excludedTags.length; i++) {
+            var idx = ctrl.tags.indexOf(excludedTags[i]);
+            // Remove the excluded tags (CBLOCK, KSM, SCM) from the tag list
+            if (idx > -1) {
+              ctrl.tags.splice(idx, 1);
+            }
+          }
+          ctrl.loadAll();
+        });
+
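+      // Flatten the {tag: {key: value}} response into a key -> {name, value,
+      // tag[]} map, so a property listed under several tags appears only once.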
+      ctrl.convertToArray = function(srcObj) {
+        ctrl.keyTagMap = {};
+        for (var idx in srcObj) {
+          //console.log("Adding keys for "+idx)
+          for (var key in srcObj[idx]) {
+
+            if (ctrl.keyTagMap.hasOwnProperty(key)) {
+              ctrl.keyTagMap[key]['tag'].push(idx);
+            } else {
+              var newProp = {};
+              newProp['name'] = key;
+              newProp['value'] = srcObj[idx][key];
+              newProp['tag'] = [];
+              newProp['tag'].push(idx);
+              ctrl.keyTagMap[key] = newProp;
+            }
+          }
+        }
+      }
+
+      ctrl.loadAll = function() {
+        $http.get("conf?cmd=getPropertyByTag&tags=KSM,SCM," + ctrl.tags)
+          .then(function(response) {
+
+            ctrl.convertToArray(response.data);
+            ctrl.configs = Object.values(ctrl.keyTagMap);
+            ctrl.component = 'All';
+            console.log("ajay -> " + JSON.stringify(ctrl.configs));
+            ctrl.sortBy('name');
+          });
+      };
+
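+      // Filter the displayed configs down to the active component and the
+      // selected tags; with no selection and component 'All' nothing is removed.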
+      ctrl.filterTags = function() {
+        if (!ctrl.selectedTags) {
+          return true;
+        }
+
+        if (ctrl.selectedTags.length < 1 && ctrl.component == 'All') {
+          return true;
+        }
+
+        ctrl.configs = ctrl.configs.filter(function(item) {
+
+          if (ctrl.component != 'All' && (item['tag'].indexOf(ctrl
+              .component) < 0)) {
+            console.log(item['name'] + " false tag " + item['tag']);
+            return false;
+          }
+
+          if (ctrl.selectedTags.length < 1) {
+            return true;
+          }
+          for (var tag in item['tag']) {
+            tag = item['tag'][tag];
+            if (ctrl.selectedTags.indexOf(tag) > -1) {
+              return true;
+            }
+          }
+          return false;
+        });
+
+      };
+      ctrl.configFilter = function(config) {
+        return false;
+      };
+      ctrl.selected = function(tag) {
+        return ctrl.selectedTags.includes(tag);
+      };
+
+      ctrl.switchto = function(tag) {
+        ctrl.component = tag;
+        ctrl.reloadConfig();
+      };
+
+      ctrl.select = function(tag) {
+        var tagIdx = ctrl.selectedTags.indexOf(tag);
+        if (tagIdx > -1) {
+          ctrl.selectedTags.splice(tagIdx, 1);
+        } else {
+          ctrl.selectedTags.push(tag);
+        }
+        ctrl.reloadConfig();
+      };
+
+      ctrl.reloadConfig = function() {
+        ctrl.configs = [];
+        ctrl.configs = Object.values(ctrl.keyTagMap);
+        ctrl.filterTags();
+      };
+
+      ctrl.sortBy = function(field) {
+        ctrl.reverse = (ctrl.propertyName === field) ? !ctrl.reverse : false;
+        ctrl.propertyName = field;
+      };
+
+      ctrl.allSelected = function(comp) {
+        //console.log("Adding key for compo ->"+comp)
+        return ctrl.component == comp;
+      };
+
+    }
+  });
+
+})();
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/templates/config.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/config.html
new file mode 100644
index 0000000..6825750
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/config.html
@@ -0,0 +1,91 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+  -->
+
+<div class="row top-buffer">
+  <div class="col-md-2">
+    <input type="text" class="form-control" placeholder="Search Properties"
+           name="search" ng-model="search.$">
+  </div>
+  <div class="col-md-10">
+    <div class="btn-group btn-group-justified">
+      <a class="btn"
+         ng-class="$ctrl.allSelected('All') ? 'btn-primary' :'btn-secondary'"
+         ng-click="$ctrl.switchto('All')">All
+      </a>
+      <a class="btn"
+         ng-class="$ctrl.allSelected('KSM') ? 'btn-primary' :'btn-secondary'"
+         ng-click="$ctrl.switchto('KSM')">KSM</a>
+      <a class="btn"
+         ng-class="$ctrl.allSelected('SCM') ? 'btn-primary' :'btn-secondary'"
+         ng-click="$ctrl.switchto('SCM')">SCM</a>
+    </div>
+  </div>
+</div>
+<div class="row">
+  <div class="col-md-2">
+
+    <table class="table table-striped table-condensed tagPanel">
+      <colgroup>
+        <col class="col-md-12">
+      </colgroup>
+      <thead>
+      <tr>
+        <th>Tag</th>
+      </tr>
+      </thead>
+      <tbody>
+      <tr ng-click="$ctrl.select(tag)"
+          ng-class="$ctrl.selected(tag) ? 'selectedtag':''"
+          ng-repeat="tag in $ctrl.tags">
+        <td>{{tag}}</td>
+      </tr>
+      </tbody>
+    </table>
+  </div>
+  <div class="col-md-10">
+    <table class="table table-striped table-condensed table-hover wrap-table">
+      <thead>
+      <tr>
+        <th class="col-md-3" >
+          <a href="javascript:void(0)" ng-click="$ctrl.sortBy('name')">Property</a>
+          <span class="sortorder" ng-show="$ctrl.propertyName === 'name'"
+                ng-class="{reverse: $ctrl.reverse}">
+              </span>
+        </th>
+        <th class="col-md-2" style="word-wrap: break-word;">
+          <a href="javascript:void(0)" ng-click="$ctrl.sortBy('value')">Value</a>
+          <span class="sortorder" ng-show="$ctrl.propertyName === 'value'"
+                ng-class="{reverse: $ctrl.reverse}"></span>
+        </th>
+        <th class="col-md-7">
+          <a href="javascript:void(0)" ng-click="$ctrl.sortBy('description')">Description</a>
+          <span class="sortorder" ng-show="$ctrl.propertyName === 'description'"
+                ng-class="{reverse: reverse}"></span>
+        </th>
+      </tr>
+      </thead>
+      <tbody>
+      <tr
+          ng-repeat="config in $ctrl.configs | filter:search | orderBy:$ctrl.propertyName:$ctrl.reverse">
+        <td style="word-wrap: break-word;">{{config.name}}</td>
+        <td style="word-wrap: break-word;">{{config.value}}</td>
+        <td style="word-wrap: break-word;">{{config.description}}</td>
+      </tr>
+      </tbody>
+    </table>
+  </div>
+</div>
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/templates/jvm.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/jvm.html
new file mode 100644
index 0000000..c1f7d16
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/jvm.html
@@ -0,0 +1,26 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<table class="table table-bordered table-striped">
+    <tr>
+        <th>JVM:</th>
+        <td>{{$ctrl.jmx.SystemProperties.java_vm_name}} {{$ctrl.jmx.SystemProperties.java_vm_version}}</td>
+    </tr>
+    <tr>
+        <th>Input arguments:</th>
+        <td>{{$ctrl.jmx.InputArguments}}</td>
+    </tr>
+</table>
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/templates/menu.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/menu.html
new file mode 100644
index 0000000..95f1b484
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/menu.html
@@ -0,0 +1,60 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<div id="navbar" class="collapse navbar-collapse">
+    <ul class="nav navbar-nav" id="ui-tabs">
+        <li>
+            <a class="dropdown-toggle"
+               id="metricsMenu"
+               data-toggle="dropdown"
+               aria-haspopup="true"
+               aria-expanded="true">
+                Metrics
+                <span class="caret"></span>
+            </a>
+            <ul
+                class="dropdown-menu"
+                aria-labelledby="metricsMenu">
+                <li ng-repeat="(name, url) in $ctrl.metrics">
+                    <a ng-href="{{url}}">{{name}}<span
+                        aria-hidden="true"></span></a></li>
+            </ul>
+        </li>
+        <li><a href="#!/config">Configuration</a></li>
+        <li ng-show="$ctrl.docs"><a href="/docs">Documentation</a></li>
+        <li>
+            <a class="dropdown-toggle"
+               id="toolsMenu"
+               data-toggle="dropdown"
+               aria-haspopup="true"
+               aria-expanded="true"
+               >
+                Common tools
+                <span class="caret"></span>
+            </a>
+            <ul class="dropdown-menu" aria-labelledby="toolsMenu">
+                <li><a href="jmx">JMX <span
+                        aria-hidden="true"></span></a></li>
+                <li><a href="conf">Config <span
+                        aria-hidden="true"></span></a></li>
+                <li><a href="stacks">Stacks <span
+                        aria-hidden="true"></span></a></li>
+                <li><a href="logLevel">Log levels <span
+                        aria-hidden="true"></span></a></li>
+            </ul>
+        </li>
+    </ul>
+</div><!--/.nav-collapse -->
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/templates/overview.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/overview.html
new file mode 100644
index 0000000..30e2d26
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/overview.html
@@ -0,0 +1,39 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<h1>Overview</h1>
+<table class="table table-bordered table-striped">
+    <tbody>
+    <tr>
+        <th>Started:</th>
+        <td>{{$ctrl.jmx.StartedTimeInMillis | date : 'medium'}}</td>
+    </tr>
+    <tr>
+        <th>Version:</th>
+        <td>{{$ctrl.jmx.Version}}</td>
+    </tr>
+    <tr>
+        <th>Compiled:</th>
+        <td>{{$ctrl.jmx.CompileInfo}}</td>
+    </tr>
+    </tbody>
+</table>
+
+<h2>JVM parameters</h2>
+
+<jvm-parameters></jvm-parameters>
+
+<div ng-transclude></div>
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/main/resources/webapps/static/templates/rpc-metrics.html b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/rpc-metrics.html
new file mode 100644
index 0000000..facb152
--- /dev/null
+++ b/hadoop-hdds/framework/src/main/resources/webapps/static/templates/rpc-metrics.html
@@ -0,0 +1,87 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<div ng-hide="$ctrl.metrics.percentiles" class="alert alert-info">
+    Please set <b>rpc.metrics.quantile.enable</b> to <b>true</b> and define the
+    intervals in seconds with the setting <b>rpc.metrics.percentiles.intervals</b>
+    (e.g. <b>60,300</b>) in your hdfs-site.xml
+    to display the Hadoop RPC-related graphs.
+</div>
+<div ng-repeat="(window,windowed) in $ctrl.metrics.percentiles">
+    <h2>{{window}} window</h2>
+    <p>Quantiles based on a fixed {{window}} window, recalculated once every
+        {{window}}.</p>
+
+    <div class="row">
+        <div class="col-md-6 col-lg-4"
+             ng-repeat="(metric,percentiles) in windowed">
+            <h3>{{metric}}</h3>
+            <p>{{percentiles.numOps}} samples</p>
+            <nvd3 options="$ctrl.percentileGraphOptions"
+                  data="percentiles.graphdata"></nvd3>
+        </div>
+    </div>
+
+</div>
+<div class="row">
+    <div ng-show="$ctrl.metrics.numavgs" class="col-md-6">
+        <h2>Number of ops / Averages</h2>
+
+        <table class="table table-bordered table-striped">
+            <thead>
+            <tr>
+                <th>Metric name</th>
+                <th>Number of ops</th>
+                <th>Average time (ms)</th>
+            </tr>
+            </thead>
+            <tr ng-repeat="(key,metric) in $ctrl.metrics.numavgs">
+                <td>{{key}}</td>
+                <td>{{metric.numOps | number}}</td>
+                <td>{{metric.avgTime | number:2}}</td>
+            </tr>
+        </table>
+    </div>
+    <div ng-show="$ctrl.metrics.successfailures" class="col-md-6">
+        <h2>Success / Failures</h2>
+
+        <table class="table table-bordered table-striped">
+            <thead>
+            <tr>
+                <th>Metric name</th>
+                <th>Success</th>
+                <th>Failures</th>
+            </tr>
+            </thead>
+
+            <tr ng-repeat="(key,metric) in $ctrl.metrics.successfailures">
+                <td>{{key}}</td>
+                <td>{{metric.success}}</td>
+                <td>{{metric.failures}}</td>
+            </tr>
+        </table>
+    </div>
+</div>
+<div ng-show="$ctrl.metrics.others">
+    <h2>Other JMX Metrics</h2>
+
+    <table class="table">
+        <tr ng-repeat="metric in $ctrl.metrics.others">
+            <td>{{metric.key}}</td>
+            <td>{{metric.value}}</td>
+        </tr>
+    </table>
+</div>
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java
new file mode 100644
index 0000000..c6eae0e
--- /dev/null
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestBaseHttpServer.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test Common ozone/hdds web methods.
+ */
+public class TestBaseHttpServer {
+  @Test
+  public void getBindAddress() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set("enabled", "false");
+
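+    // Stub implementation that overrides the configuration-key lookups with
+    // fixed values; only the host/address resolution of getBindAddress() is
+    // exercised here.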
+    BaseHttpServer baseHttpServer = new BaseHttpServer(conf, "test") {
+      @Override
+      protected String getHttpAddressKey() {
+        return null;
+      }
+
+      @Override
+      protected String getHttpsAddressKey() {
+        return null;
+      }
+
+      @Override
+      protected String getHttpBindHostKey() {
+        return null;
+      }
+
+      @Override
+      protected String getHttpsBindHostKey() {
+        return null;
+      }
+
+      @Override
+      protected String getBindHostDefault() {
+        return null;
+      }
+
+      @Override
+      protected int getHttpBindPortDefault() {
+        return 0;
+      }
+
+      @Override
+      protected int getHttpsBindPortDefault() {
+        return 0;
+      }
+
+      @Override
+      protected String getKeytabFile() {
+        return null;
+      }
+
+      @Override
+      protected String getSpnegoPrincipal() {
+        return null;
+      }
+
+      @Override
+      protected String getEnabledKey() {
+        return "enabled";
+      }
+    };
+
+    conf.set("addresskey", "0.0.0.0:1234");
+
+    Assert.assertEquals("/0.0.0.0:1234", baseHttpServer
+        .getBindAddress("bindhostkey", "addresskey",
+            "default", 65).toString());
+
+    conf.set("bindhostkey", "1.2.3.4");
+
+    Assert.assertEquals("/1.2.3.4:1234", baseHttpServer
+        .getBindAddress("bindhostkey", "addresskey",
+            "default", 65).toString());
+  }
+
+}
\ No newline at end of file
diff --git a/hadoop-hdds/framework/src/test/resources/ozone-site.xml b/hadoop-hdds/framework/src/test/resources/ozone-site.xml
new file mode 100644
index 0000000..77dd7ef
--- /dev/null
+++ b/hadoop-hdds/framework/src/test/resources/ozone-site.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+</configuration>
diff --git a/hadoop-hdds/pom.xml b/hadoop-hdds/pom.xml
new file mode 100644
index 0000000..c15c541
--- /dev/null
+++ b/hadoop-hdds/pom.xml
@@ -0,0 +1,117 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-dist</artifactId>
+    <version>3.2.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project-dist</relativePath>
+  </parent>
+
+  <artifactId>hadoop-hdds</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Distributed Data Store Parent project</description>
+  <name>Apache Hdds</name>
+  <packaging>pom</packaging>
+
+  <modules>
+    <module>client</module>
+    <module>common</module>
+    <module>framework</module>
+    <module>container-service</module>
+    <module>server-scm</module>
+    <module>tools</module>
+
+  </modules>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>**/target/**</exclude>
+            <exclude>.gitattributes</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>src/main/resources/webapps/static/angular-1.6.4.min.js</exclude>
+            <exclude>src/main/resources/webapps/static/angular-nvd3-1.0.9.min.js</exclude>
+            <exclude>src/main/resources/webapps/static/angular-route-1.6.4.min.js</exclude>
+            <exclude>src/main/resources/webapps/static/d3-3.5.17.min.js</exclude>
+            <exclude>src/main/resources/webapps/static/nvd3-1.8.5.min.css</exclude>
+            <exclude>src/main/resources/webapps/static/nvd3-1.8.5.min.css.map</exclude>
+            <exclude>src/main/resources/webapps/static/nvd3-1.8.5.min.js</exclude>
+            <exclude>src/main/resources/webapps/static/nvd3-1.8.5.min.js.map</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile combine.self="override"></excludeFilterFile>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-hdds/server-scm/pom.xml b/hadoop-hdds/server-scm/pom.xml
new file mode 100644
index 0000000..fd927d8
--- /dev/null
+++ b/hadoop-hdds/server-scm/pom.xml
@@ -0,0 +1,159 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-hdds-server-scm</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache HDDS SCM server</description>
+  <name>Apache Hadoop HDDS SCM server</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-core</artifactId>
+      <version>1.3</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>io.dropwizard.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-generator-annprocess</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-all</artifactId>
+      <version>1.3</version>
+    </dependency>
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk16</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy web resources</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <copy toDir="${project.build.directory}/webapps">
+                  <fileset dir="${basedir}/src/main/webapps">
+                    <exclude name="**/proto-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-common-html</id>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>unpack</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <artifactItems>
+            <artifactItem>
+              <groupId>org.apache.hadoop</groupId>
+              <artifactId>hadoop-hdds-server-framework</artifactId>
+              <outputDirectory>${project.build.directory}/</outputDirectory>
+              <includes>webapps/static/**/*.*</includes>
+            </artifactItem>
+          </artifactItems>
+          <overWriteSnapshots>true</overWriteSnapshots>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java
new file mode 100644
index 0000000..17b6814
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMMXBean.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.server.ServiceRuntimeInfo;
+
+import java.util.Map;
+
+/**
+ * This is the JMX management interface for SCM information.
+ */
+@InterfaceAudience.Private
+public interface SCMMXBean extends ServiceRuntimeInfo {
+
+  /**
+   * Get the SCM RPC server port that is used to listen to datanode requests.
+   * @return SCM datanode RPC server port
+   */
+  String getDatanodeRpcPort();
+
+  /**
+   * Get the SCM RPC server port that is used to listen to client requests.
+   * @return SCM client RPC server port
+   */
+  String getClientRpcPort();
+
+  /**
+   * Get container report info that includes container IO stats of nodes.
+   * @return the datanode UUID to container report JSON string mapping
+   */
+  Map<String, String> getContainerReport();
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java
new file mode 100644
index 0000000..27e9363
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/SCMStorage.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+import org.apache.hadoop.ozone.common.Storage;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+import static org.apache.hadoop.ozone.OzoneConsts.SCM_ID;
+import static org.apache.hadoop.ozone.OzoneConsts.STORAGE_DIR;
+
+/**
+ * SCMStorage is responsible for management of the StorageDirectories used by
+ * the SCM.
+ */
+public class SCMStorage extends Storage {
+
+  /**
+   * Construct SCMStorage.
+   * @throws IOException if any directories are inaccessible.
+   */
+  public SCMStorage(OzoneConfiguration conf) throws IOException {
+    super(NodeType.SCM, getOzoneMetaDirPath(conf), STORAGE_DIR);
+  }
+
+  public void setScmId(String scmId) throws IOException {
+    if (getState() == StorageState.INITIALIZED) {
+      throw new IOException("SCM is already initialized.");
+    } else {
+      getStorageInfo().setProperty(SCM_ID, scmId);
+    }
+  }
+
+  /**
+   * Retrieves the SCM ID from the version file.
+   * @return SCM_ID
+   */
+  public String getScmId() {
+    return getStorageInfo().getProperty(SCM_ID);
+  }
+
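+  /**
+   * Node-specific properties written to the version file; generates a random
+   * SCM ID if one has not been set via setScmId().
+   */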
+  @Override
+  protected Properties getNodeProperties() {
+    String scmId = getScmId();
+    if (scmId == null) {
+      scmId = UUID.randomUUID().toString();
+    }
+    Properties scmProperties = new Properties();
+    scmProperties.setProperty(SCM_ID, scmId);
+    return scmProperties;
+  }
+
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java
new file mode 100644
index 0000000..ce0d4f8
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManager.java
@@ -0,0 +1,1290 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.scm.block.BlockManager;
+import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.Mapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMMetrics;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.SCMNodeManager;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
+    .DeleteBlockTransactionResult;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos
+    .ContainerBlocksDeletionACKResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeAddressList;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SendContainerReportProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+import org.apache.hadoop.ozone.common.Storage.StorageState;
+import org.apache.hadoop.ozone.common.StorageInfo;
+import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
+import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand;
+import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
+import org.apache.hadoop.ozone.protocol.commands.RegisteredCommand;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.ozone.protocolPB
+    .ScmBlockLocationProtocolServerSideTranslatorPB;
+import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerDatanodeProtocolServerSideTranslatorPB;
+import org.apache.hadoop.ozone.protocolPB
+    .StorageContainerLocationProtocolServerSideTranslatorPB;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CLIENT_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DATANODE_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_MB;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HANDLER_COUNT_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HANDLER_COUNT_KEY;
+import static org.apache.hadoop.hdds.protocol.proto
+    .ScmBlockLocationProtocolProtos.DeleteScmBlockResult.Result;
+import static org.apache.hadoop.hdds.server.ServerUtils.updateRPCListenAddress;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
+/**
+ * StorageContainerManager is the main entry point for the service that
+ * provides information about which nodes host containers.
+ *
+ * DataNodes report to StorageContainerManager using heartbeat messages.
+ * SCM allocates containers and returns a pipeline.
+ *
+ * Once a client gets a pipeline (a list of datanodes), it connects to those
+ * datanodes and creates a container, which can then be used to store data.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "CBLOCK", "OZONE", "HBASE"})
+public class StorageContainerManager extends ServiceRuntimeInfoImpl
+    implements StorageContainerDatanodeProtocol,
+    StorageContainerLocationProtocol, ScmBlockLocationProtocol, SCMMXBean {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StorageContainerManager.class);
+
+  /**
+   *  Startup options.
+   */
+  public enum StartupOption {
+    INIT("-init"),
+    CLUSTERID("-clusterid"),
+    GENCLUSTERID("-genclusterid"),
+    REGULAR("-regular"),
+    HELP("-help");
+
+    private final String name;
+    private String clusterId = null;
+
+    public void setClusterId(String cid) {
+      if(cid != null && !cid.isEmpty()) {
+        clusterId = cid;
+      }
+    }
+
+    public String getClusterId() {
+      return clusterId;
+    }
+
+    StartupOption(String arg) {
+      this.name = arg;
+    }
+
+    public String getName() {
+      return name;
+    }
+  }
+
+  /**
+   * NodeManager and container Managers for SCM.
+   */
+  private final NodeManager scmNodeManager;
+  private final Mapping scmContainerManager;
+  private final BlockManager scmBlockManager;
+  private final SCMStorage scmStorage;
+
+  /** The RPC server that listens to requests from DataNodes. */
+  private final RPC.Server datanodeRpcServer;
+  private final InetSocketAddress datanodeRpcAddress;
+
+  /** The RPC server that listens to requests from clients. */
+  private final RPC.Server clientRpcServer;
+  private final InetSocketAddress clientRpcAddress;
+
+  /** The RPC server that listens to requests from block service clients. */
+  private final RPC.Server blockRpcServer;
+  private final InetSocketAddress blockRpcAddress;
+
+  private final StorageContainerManagerHttpServer httpServer;
+
+  /** SCM mxbean. */
+  private ObjectName scmInfoBeanName;
+
+  /** SCM super user. */
+  private final String scmUsername;
+  private final Collection<String> scmAdminUsernames;
+
+  /** SCM metrics. */
+  private static SCMMetrics metrics;
+  /** Key = DatanodeUuid, value = ContainerStat. */
+  private Cache<String, ContainerStat> containerReportCache;
+
+  private static final String USAGE =
+      "Usage: \n ozone scm [genericOptions] "
+          + "[ " + StartupOption.INIT.getName() + " [ "
+          + StartupOption.CLUSTERID.getName() + " <cid> ] ]\n "
+          + "ozone scm [genericOptions] [ "
+          + StartupOption.GENCLUSTERID.getName() + " ]\n " +
+          "ozone scm [ "
+          + StartupOption.HELP.getName() + " ]\n";
+  /**
+   * Creates a new StorageContainerManager.  Configuration will be updated with
+   * information on the actual listening addresses used for RPC servers.
+   *
+   * @param conf configuration
+   */
+  private StorageContainerManager(OzoneConfiguration conf)
+      throws IOException {
+
+    final int handlerCount = conf.getInt(
+        OZONE_SCM_HANDLER_COUNT_KEY, OZONE_SCM_HANDLER_COUNT_DEFAULT);
+    final int cacheSize = conf.getInt(OZONE_SCM_DB_CACHE_SIZE_MB,
+        OZONE_SCM_DB_CACHE_SIZE_DEFAULT);
+
+    StorageContainerManager.initMetrics();
+    initContainerReportCache(conf);
+
+    scmStorage = new SCMStorage(conf);
+    if (scmStorage.getState() != StorageState.INITIALIZED) {
+      throw new SCMException("SCM not initialized.",
+          ResultCodes.SCM_NOT_INITIALIZED);
+    }
+    scmNodeManager = new SCMNodeManager(conf, scmStorage.getClusterID(), this);
+    scmContainerManager = new ContainerMapping(conf, scmNodeManager, cacheSize);
+    scmBlockManager = new BlockManagerImpl(conf, scmNodeManager,
+        scmContainerManager, cacheSize);
+
+    scmAdminUsernames = conf.getTrimmedStringCollection(
+        OzoneConfigKeys.OZONE_ADMINISTRATORS);
+    scmUsername = UserGroupInformation.getCurrentUser().getUserName();
+    if (!scmAdminUsernames.contains(scmUsername)) {
+      scmAdminUsernames.add(scmUsername);
+    }
+
+    RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
+        ProtobufRpcEngine.class);
+    RPC.setProtocolEngine(conf, StorageContainerLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    RPC.setProtocolEngine(conf, ScmBlockLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+
+    BlockingService dnProtoPbService = StorageContainerDatanodeProtocolProtos.
+        StorageContainerDatanodeProtocolService.newReflectiveBlockingService(
+        new StorageContainerDatanodeProtocolServerSideTranslatorPB(this));
+
+    final InetSocketAddress datanodeRpcAddr =
+        HddsServerUtil.getScmDataNodeBindAddress(conf);
+    datanodeRpcServer = startRpcServer(conf, datanodeRpcAddr,
+        StorageContainerDatanodeProtocolPB.class, dnProtoPbService,
+        handlerCount);
+    datanodeRpcAddress = updateRPCListenAddress(conf,
+        OZONE_SCM_DATANODE_ADDRESS_KEY, datanodeRpcAddr, datanodeRpcServer);
+
+    // SCM Container Service RPC
+    BlockingService storageProtoPbService =
+        StorageContainerLocationProtocolProtos
+            .StorageContainerLocationProtocolService
+            .newReflectiveBlockingService(
+            new StorageContainerLocationProtocolServerSideTranslatorPB(this));
+
+    final InetSocketAddress scmAddress =
+        HddsServerUtil.getScmClientBindAddress(conf);
+    clientRpcServer = startRpcServer(conf, scmAddress,
+        StorageContainerLocationProtocolPB.class, storageProtoPbService,
+        handlerCount);
+    clientRpcAddress = updateRPCListenAddress(conf,
+        OZONE_SCM_CLIENT_ADDRESS_KEY, scmAddress, clientRpcServer);
+
+    // SCM Block Service RPC
+    BlockingService blockProtoPbService =
+        ScmBlockLocationProtocolProtos
+            .ScmBlockLocationProtocolService
+            .newReflectiveBlockingService(
+            new ScmBlockLocationProtocolServerSideTranslatorPB(this));
+
+    final InetSocketAddress scmBlockAddress =
+        HddsServerUtil.getScmBlockClientBindAddress(conf);
+    blockRpcServer = startRpcServer(conf, scmBlockAddress,
+        ScmBlockLocationProtocolPB.class, blockProtoPbService,
+        handlerCount);
+    blockRpcAddress = updateRPCListenAddress(conf,
+        OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY, scmBlockAddress, blockRpcServer);
+
+    httpServer = new StorageContainerManagerHttpServer(conf);
+
+    registerMXBean();
+  }
+
+  /**
+   * Initializes the cache of container reports sent from datanodes.
+   *
+   * @param conf Ozone configuration
+   */
+  private void initContainerReportCache(OzoneConfiguration conf) {
+    containerReportCache = CacheBuilder.newBuilder()
+        .expireAfterAccess(Long.MAX_VALUE, TimeUnit.MILLISECONDS)
+        .maximumSize(Integer.MAX_VALUE)
+        .removalListener(new RemovalListener<String, ContainerStat>() {
+          @Override
+          public void onRemoval(
+              RemovalNotification<String, ContainerStat> removalNotification) {
+            synchronized (containerReportCache) {
+              ContainerStat stat = removalNotification.getValue();
+              // remove invalid container report
+              metrics.decrContainerStat(stat);
+              LOG.debug(
+                  "Remove expired container stat entry for datanode: {}.",
+                  removalNotification.getKey());
+            }
+          }
+        }).build();
+  }
+
+  /**
+   * Builds a message for logging startup information about an RPC server.
+   *
+   * @param description RPC server description
+   * @param addr RPC server listening address
+   * @return server startup message
+   */
+  private static String buildRpcServerStartMessage(String description,
+      InetSocketAddress addr) {
+    return addr != null ? String.format("%s is listening at %s",
+        description, addr.toString()) :
+        String.format("%s not started", description);
+  }
+
+  /**
+   * Starts an RPC server, if configured.
+   *
+   * @param conf configuration
+   * @param addr configured address of RPC server
+   * @param protocol RPC protocol provided by RPC server
+   * @param instance RPC protocol implementation instance
+   * @param handlerCount RPC server handler count
+   *
+   * @return RPC server
+   * @throws IOException if there is an I/O error while creating RPC server
+   */
+  private static RPC.Server startRpcServer(OzoneConfiguration conf,
+      InetSocketAddress addr, Class<?> protocol, BlockingService instance,
+      int handlerCount)
+      throws IOException {
+    RPC.Server rpcServer = new RPC.Builder(conf)
+        .setProtocol(protocol)
+        .setInstance(instance)
+        .setBindAddress(addr.getHostString())
+        .setPort(addr.getPort())
+        .setNumHandlers(handlerCount)
+        .setVerbose(false)
+        .setSecretManager(null)
+        .build();
+
+    DFSUtil.addPBProtocol(conf, protocol, instance, rpcServer);
+    return rpcServer;
+  }
+
+  private void registerMXBean() {
+    Map<String, String> jmxProperties = new HashMap<>();
+    jmxProperties.put("component", "ServerRuntime");
+    this.scmInfoBeanName =
+        MBeans.register("StorageContainerManager",
+            "StorageContainerManagerInfo",
+            jmxProperties,
+            this);
+  }
+
+  private void unregisterMXBean() {
+    if(this.scmInfoBeanName != null) {
+      MBeans.unregister(this.scmInfoBeanName);
+      this.scmInfoBeanName = null;
+    }
+  }
+
+  /**
+   * Main entry point for starting StorageContainerManager.
+   *
+   * @param argv arguments
+   * @throws IOException if startup fails due to I/O error
+   */
+  public static void main(String[] argv) throws IOException {
+    if (DFSUtil.parseHelpArgument(argv, USAGE,
+        System.out, true)) {
+      System.exit(0);
+    }
+    try {
+      OzoneConfiguration conf = new OzoneConfiguration();
+      GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
+      if (!hParser.isParseSuccessful()) {
+        System.err.println("USAGE: " + USAGE + "\n");
+        hParser.printGenericCommandUsage(System.err);
+        System.exit(1);
+      }
+      StringUtils.startupShutdownMessage(StorageContainerManager.class,
+          argv, LOG);
+      StorageContainerManager scm = createSCM(hParser.getRemainingArgs(), conf);
+      if (scm != null) {
+        scm.start();
+        scm.join();
+      }
+    } catch (Throwable t) {
+      LOG.error("Failed to start the StorageContainerManager.", t);
+      terminate(1, t);
+    }
+  }
+
+  private static void printUsage(PrintStream out) {
+    out.println(USAGE + "\n");
+  }
+
+  public static StorageContainerManager createSCM(String[] argv,
+      OzoneConfiguration conf) throws IOException {
+    if (!HddsUtils.isHddsEnabled(conf)) {
+      System.err.println("SCM cannot be started in secure mode or when " +
+          OZONE_ENABLED + " is set to false");
+      System.exit(1);
+    }
+    StartupOption startOpt = parseArguments(argv);
+    if (startOpt == null) {
+      printUsage(System.err);
+      terminate(1);
+      return null;
+    }
+    switch (startOpt) {
+    case INIT:
+      terminate(scmInit(conf) ? 0 : 1);
+      return null;
+    case GENCLUSTERID:
+      System.out.println("Generating new cluster id:");
+      System.out.println(StorageInfo.newClusterID());
+      terminate(0);
+      return null;
+    case HELP:
+      printUsage(System.err);
+      terminate(0);
+      return null;
+    default:
+      return new StorageContainerManager(conf);
+    }
+  }
+
+  /**
+   * Routine to set up the Version info for StorageContainerManager.
+   *
+   * @param conf OzoneConfiguration
+   * @return true if SCM initialization is successful, false otherwise.
+   * @throws IOException if init fails due to I/O error
+   */
+  public static boolean scmInit(OzoneConfiguration conf) throws IOException {
+    SCMStorage scmStorage = new SCMStorage(conf);
+    StorageState state = scmStorage.getState();
+    if (state != StorageState.INITIALIZED) {
+      try {
+        String clusterId = StartupOption.INIT.getClusterId();
+        if (clusterId != null && !clusterId.isEmpty()) {
+          scmStorage.setClusterId(clusterId);
+        }
+        scmStorage.initialize();
+        System.out.println("SCM initialization succeeded." +
+            "Current cluster id for sd=" + scmStorage.getStorageDir() + ";cid="
+                + scmStorage.getClusterID());
+        return true;
+      } catch (IOException ioe) {
+        LOG.error("Could not initialize SCM version file", ioe);
+        return false;
+      }
+    } else {
+      System.out.println("SCM already initialized. Reusing existing" +
+          " cluster id for sd=" + scmStorage.getStorageDir() + ";cid="
+              + scmStorage.getClusterID());
+      return true;
+    }
+  }
+
+  private static StartupOption parseArguments(String[] args) {
+    int argsLen = (args == null) ? 0 : args.length;
+    StartupOption startOpt = StartupOption.HELP;
+    if (argsLen == 0) {
+      startOpt = StartupOption.REGULAR;
+    }
+    for (int i = 0; i < argsLen; i++) {
+      String cmd = args[i];
+      if (StartupOption.INIT.getName().equalsIgnoreCase(cmd)) {
+        startOpt = StartupOption.INIT;
+        if (argsLen > 3) {
+          return null;
+        }
+        for (i = i + 1; i < argsLen; i++) {
+          if (args[i].equalsIgnoreCase(StartupOption.CLUSTERID.getName())) {
+            i++;
+            if (i < argsLen && !args[i].isEmpty()) {
+              startOpt.setClusterId(args[i]);
+            } else {
+              // if no cluster id specified or is empty string, return null
+              LOG.error("Must specify a valid cluster ID after the "
+                  + StartupOption.CLUSTERID.getName() + " flag");
+              return null;
+            }
+          } else {
+            return null;
+          }
+        }
+      } else if (StartupOption.GENCLUSTERID.getName().equalsIgnoreCase(cmd)) {
+        if (argsLen > 1) {
+          return null;
+        }
+        startOpt = StartupOption.GENCLUSTERID;
+      }
+    }
+    return startOpt;
+  }
+
+  /**
+   * Returns an SCMCommandResponseProto for the given SCM command.
+   * @param cmd - SCM command
+   * @return SCMCommandResponseProto
+   * @throws InvalidProtocolBufferException
+   */
+  @VisibleForTesting
+  public SCMCommandResponseProto getCommandResponse(SCMCommand cmd,
+      final String datanodID)
+      throws IOException {
+    SCMCmdType type = cmd.getType();
+    SCMCommandResponseProto.Builder builder =
+        SCMCommandResponseProto.newBuilder()
+        .setDatanodeUUID(datanodID);
+    switch (type) {
+    case registeredCommand:
+      return builder.setCmdType(SCMCmdType.registeredCommand)
+          .setRegisteredProto(
+              SCMRegisteredCmdResponseProto.getDefaultInstance())
+          .build();
+    case versionCommand:
+      return builder.setCmdType(SCMCmdType.versionCommand)
+          .setVersionProto(SCMVersionResponseProto.getDefaultInstance())
+          .build();
+    case sendContainerReport:
+      return builder.setCmdType(SCMCmdType.sendContainerReport)
+          .setSendReport(SendContainerReportProto.getDefaultInstance())
+          .build();
+    case reregisterCommand:
+      return builder.setCmdType(SCMCmdType.reregisterCommand)
+          .setReregisterProto(SCMReregisterCmdResponseProto
+              .getDefaultInstance())
+          .build();
+    case deleteBlocksCommand:
+      // Once SCM sends out the deletion message, increment the count.
+      // This is done here instead of when SCM receives the ACK, because
+      // the DN might not be able to respond with the ACK for some time. If
+      // it times out, SCM needs to re-send the message a few more times.
+      List<Long> txs = ((DeleteBlocksCommand) cmd).blocksTobeDeleted()
+          .stream().map(tx -> tx.getTxID()).collect(Collectors.toList());
+      this.getScmBlockManager().getDeletedBlockLog().incrementCount(txs);
+      return builder.setCmdType(SCMCmdType.deleteBlocksCommand)
+          .setDeleteBlocksProto(((DeleteBlocksCommand) cmd).getProto())
+          .build();
+    case closeContainerCommand:
+      return builder.setCmdType(SCMCmdType.closeContainerCommand)
+          .setCloseContainerProto(((CloseContainerCommand)cmd).getProto())
+          .build();
+    default:
+      throw new IllegalArgumentException("Not implemented");
+    }
+  }
+
+  @VisibleForTesting
+  public static SCMRegisteredCmdResponseProto getRegisteredResponse(
+      SCMCommand cmd, SCMNodeAddressList addressList) {
+    Preconditions.checkState(cmd.getClass() == RegisteredCommand.class);
+    RegisteredCommand rCmd = (RegisteredCommand) cmd;
+    SCMCmdType type = cmd.getType();
+    if (type != SCMCmdType.registeredCommand) {
+      throw new IllegalArgumentException("Registered command is not well " +
+          "formed. Internal Error.");
+    }
+    return SCMRegisteredCmdResponseProto.newBuilder()
+        //TODO : Fix this later when we have multiple SCM support.
+        //.setAddressList(addressList)
+        .setErrorCode(rCmd.getError())
+        .setClusterID(rCmd.getClusterID())
+        .setDatanodeUUID(rCmd.getDatanodeUUID()).build();
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Pipeline getContainer(String containerName) throws IOException {
+    checkAdminAccess();
+    return scmContainerManager.getContainer(containerName).getPipeline();
+  }
+
+  @VisibleForTesting
+  public ContainerInfo getContainerInfo(String containerName)
+      throws IOException {
+    return scmContainerManager.getContainer(containerName);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<ContainerInfo> listContainer(String startName,
+      String prefixName, int count) throws IOException {
+    return scmContainerManager.listContainer(startName, prefixName, count);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void deleteContainer(String containerName) throws IOException {
+    checkAdminAccess();
+    scmContainerManager.deleteContainer(containerName);
+  }
+
+  /**
+   * Queries a list of nodes matching the given node statuses.
+   *
+   * @param nodeStatuses - set of node states to match.
+   * @param queryScope - scope of the query (cluster or pool).
+   * @param poolName - name of the node pool when the scope is POOL.
+   * @return NodePool containing the matching datanodes.
+   */
+  @Override
+  public HddsProtos.NodePool queryNode(EnumSet<NodeState> nodeStatuses,
+      HddsProtos.QueryScope queryScope, String poolName) throws IOException {
+
+    if (queryScope == HddsProtos.QueryScope.POOL) {
+      throw new IllegalArgumentException("Not Supported yet");
+    }
+
+    List<DatanodeDetails> datanodes = queryNode(nodeStatuses);
+    HddsProtos.NodePool.Builder poolBuilder =
+        HddsProtos.NodePool.newBuilder();
+
+    for (DatanodeDetails datanode : datanodes) {
+      HddsProtos.Node node = HddsProtos.Node.newBuilder()
+          .setNodeID(datanode.getProtoBufMessage())
+          .addAllNodeStates(nodeStatuses)
+          .build();
+      poolBuilder.addNodes(node);
+    }
+
+    return poolBuilder.build();
+  }
+
+  /**
+   * Notification from a client when it begins or finishes an operation on
+   * container/pipeline objects on datanodes.
+   * @param type - object type (container or pipeline).
+   * @param name - object name.
+   * @param op - operation (create or close).
+   * @param stage - stage of the operation (begin or complete).
+   */
+  @Override
+  public void notifyObjectStageChange(
+      ObjectStageChangeRequestProto.Type type, String name,
+      ObjectStageChangeRequestProto.Op op,
+      ObjectStageChangeRequestProto.Stage stage) throws IOException {
+
+    LOG.info("Object type {} name {} op {} new stage {}",
+        type, name, op, stage);
+    if (type == ObjectStageChangeRequestProto.Type.container) {
+      if (op == ObjectStageChangeRequestProto.Op.create) {
+        if (stage == ObjectStageChangeRequestProto.Stage.begin) {
+          scmContainerManager.updateContainerState(name,
+              HddsProtos.LifeCycleEvent.CREATE);
+        } else {
+          scmContainerManager.updateContainerState(name,
+              HddsProtos.LifeCycleEvent.CREATED);
+        }
+      } else if (op == ObjectStageChangeRequestProto.Op.close) {
+        if (stage == ObjectStageChangeRequestProto.Stage.begin) {
+          scmContainerManager.updateContainerState(name,
+              HddsProtos.LifeCycleEvent.FINALIZE);
+        } else {
+          scmContainerManager.updateContainerState(name,
+              HddsProtos.LifeCycleEvent.CLOSE);
+        }
+      }
+    } //else if (type == ObjectStageChangeRequestProto.Type.pipeline) {
+    // TODO: pipeline state update will be addressed in future patch.
+    //}
+  }
+
+  /**
+   * Creates a replication pipeline of a specified type.
+   */
+  @Override
+  public Pipeline createReplicationPipeline(
+      HddsProtos.ReplicationType replicationType,
+      HddsProtos.ReplicationFactor factor,
+      HddsProtos.NodePool nodePool)
+      throws IOException {
+     // TODO: will be addressed in future patch.
+    return null;
+  }
+
+  /**
+   * Queries a list of nodes that match a set of statuses.
+   * <p>
+   * For example, if the nodeStatuses set is HEALTHY and RAFT_MEMBER,
+   * then this call will return all healthy nodes which are members of a
+   * Raft pipeline.
+   * <p>
+   * Right now we don't support boolean operators, so we assume an AND
+   * operation between the given statuses.
+   *
+   * @param nodeStatuses - A set of NodeStates.
+   * @return List of Datanodes.
+   */
+
+  public List<DatanodeDetails> queryNode(EnumSet<NodeState> nodeStatuses) {
+    Preconditions.checkNotNull(nodeStatuses, "Node Query set cannot be null");
+    Preconditions.checkState(nodeStatuses.size() > 0, "No valid arguments " +
+        "in the query set");
+    List<DatanodeDetails> resultList = new LinkedList<>();
+    Set<DatanodeDetails> currentSet = new TreeSet<>();
+
+    for (NodeState nodeState : nodeStatuses) {
+      Set<DatanodeDetails> nextSet = queryNodeState(nodeState);
+      if ((nextSet == null) || (nextSet.size() == 0)) {
+        // Right now we only support the AND operation, so the intersection
+        // with an empty set is empty.
+        return resultList;
+      }
+      // First time we have to add all the elements, next time we have to
+      // do an intersection operation on the set.
+      if (currentSet.size() == 0) {
+        currentSet.addAll(nextSet);
+      } else {
+        currentSet.retainAll(nextSet);
+      }
+    }
+
+    resultList.addAll(currentSet);
+    return resultList;
+  }
+
+  /**
+   * Query the System for Nodes.
+   *
+   * @param nodeState - NodeState that we are interested in matching.
+   * @return Set of Datanodes that match the NodeState.
+   */
+  private Set<DatanodeDetails> queryNodeState(NodeState nodeState) {
+    if (nodeState == NodeState.RAFT_MEMBER ||
+        nodeState == NodeState.FREE_NODE) {
+      throw new IllegalStateException("Not implemented yet");
+    }
+    Set<DatanodeDetails> returnSet = new TreeSet<>();
+    List<DatanodeDetails> tmp = getScmNodeManager().getNodes(nodeState);
+    if ((tmp != null) && (tmp.size() > 0)) {
+      returnSet.addAll(tmp);
+    }
+    return returnSet;
+  }
+
+  /**
+   * Asks SCM where a container should be allocated. SCM responds with the set
+   * of datanodes that should be used for creating this container.
+   *
+   * @param replicationType - replication type.
+   * @param replicationFactor - replication factor.
+   * @param containerName - name of the container.
+   * @param owner - owner of the container.
+   * @return pipeline
+   * @throws IOException
+   */
+  @Override
+  public Pipeline allocateContainer(HddsProtos.ReplicationType replicationType,
+      HddsProtos.ReplicationFactor replicationFactor, String containerName,
+      String owner) throws IOException {
+
+    checkAdminAccess();
+    return scmContainerManager
+        .allocateContainer(replicationType, replicationFactor, containerName,
+            owner).getPipeline();
+  }
+
+  /**
+   * Returns listening address of StorageLocation Protocol RPC server.
+   *
+   * @return listen address of StorageLocation RPC server
+   */
+  @VisibleForTesting
+  public InetSocketAddress getClientRpcAddress() {
+    return clientRpcAddress;
+  }
+
+  @Override
+  public String getClientRpcPort() {
+    InetSocketAddress addr = getClientRpcAddress();
+    return addr == null ? "0" : Integer.toString(addr.getPort());
+  }
+
+  /**
+   * Returns listening address of StorageDatanode Protocol RPC server.
+   *
+   * @return Address where datanode are communicating.
+   */
+  public InetSocketAddress getDatanodeRpcAddress() {
+    return datanodeRpcAddress;
+  }
+
+  @Override
+  public String getDatanodeRpcPort() {
+    InetSocketAddress addr = getDatanodeRpcAddress();
+    return addr == null ? "0" : Integer.toString(addr.getPort());
+  }
+
+  /**
+   * Start service.
+   */
+  public void start() throws IOException {
+    LOG.info(buildRpcServerStartMessage(
+        "StorageContainerLocationProtocol RPC server", clientRpcAddress));
+    DefaultMetricsSystem.initialize("StorageContainerManager");
+    clientRpcServer.start();
+    LOG.info(buildRpcServerStartMessage(
+        "ScmBlockLocationProtocol RPC server", blockRpcAddress));
+    blockRpcServer.start();
+    LOG.info(buildRpcServerStartMessage("RPC server for DataNodes",
+        datanodeRpcAddress));
+    datanodeRpcServer.start();
+    httpServer.start();
+    scmBlockManager.start();
+
+    setStartTime();
+
+  }
+
+  /**
+   * Stop service.
+   */
+  public void stop() {
+    try {
+      LOG.info("Stopping block service RPC server");
+      blockRpcServer.stop();
+    } catch (Exception ex) {
+      LOG.error("Storage Container Manager blockRpcServer stop failed.", ex);
+    }
+
+    try {
+      LOG.info("Stopping the StorageContainerLocationProtocol RPC server");
+      clientRpcServer.stop();
+    } catch (Exception ex) {
+      LOG.error("Storage Container Manager clientRpcServer stop failed.", ex);
+    }
+
+    try {
+      LOG.info("Stopping the RPC server for DataNodes");
+      datanodeRpcServer.stop();
+    } catch (Exception ex) {
+      LOG.error("Storage Container Manager datanodeRpcServer stop failed.", ex);
+    }
+
+    try {
+      LOG.info("Stopping Storage Container Manager HTTP server.");
+      httpServer.stop();
+    } catch (Exception ex) {
+      LOG.error("Storage Container Manager HTTP server stop failed.", ex);
+    }
+
+    try {
+      LOG.info("Stopping Block Manager Service.");
+      scmBlockManager.stop();
+    } catch (Exception ex) {
+      LOG.error("SCM block manager service stop failed.", ex);
+    }
+
+    if (containerReportCache != null) {
+      containerReportCache.invalidateAll();
+      containerReportCache.cleanUp();
+    }
+
+    if (metrics != null) {
+      metrics.unRegister();
+    }
+
+    unregisterMXBean();
+    IOUtils.cleanupWithLogger(LOG, scmContainerManager);
+    IOUtils.cleanupWithLogger(LOG, scmNodeManager);
+  }
+
+  /**
+   * Wait until service has completed shutdown.
+   */
+  public void join() {
+    try {
+      blockRpcServer.join();
+      clientRpcServer.join();
+      datanodeRpcServer.join();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.info("Interrupted during StorageContainerManager join.");
+    }
+  }
+
+  /**
+   * Returns SCM version.
+   *
+   * @return Version info.
+   */
+  @Override
+  public SCMVersionResponseProto getVersion(
+      SCMVersionRequestProto versionRequest) throws IOException {
+    return getScmNodeManager().getVersion(versionRequest).getProtobufMessage();
+  }
+
+  /**
+   * Used by data node to send a Heartbeat.
+   *
+   * @param datanodeDetails - Datanode Details.
+   * @param nodeReport - Node Report
+   * @param reportState - Container report ready info.
+   * @return - SCMHeartbeatResponseProto
+   * @throws IOException
+   */
+  @Override
+  public SCMHeartbeatResponseProto sendHeartbeat(
+      DatanodeDetailsProto datanodeDetails, SCMNodeReport nodeReport,
+      ReportState reportState) throws IOException {
+    List<SCMCommand> commands =
+        getScmNodeManager().sendHeartbeat(datanodeDetails, nodeReport,
+            reportState);
+    List<SCMCommandResponseProto> cmdResponses = new LinkedList<>();
+    for (SCMCommand cmd : commands) {
+      cmdResponses.add(getCommandResponse(cmd, datanodeDetails.getUuid()
+          .toString()));
+    }
+    return SCMHeartbeatResponseProto.newBuilder().addAllCommands(cmdResponses)
+        .build();
+  }
+
+  /**
+   * Register Datanode.
+   *
+   * @param datanodeDetails - Datanode details.
+   * @param scmAddresses - List of SCM addresses this datanode is configured
+   * to communicate with.
+   * @return SCM registration response.
+   */
+  @Override
+  public StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto
+      register(DatanodeDetailsProto datanodeDetails, String[] scmAddresses) {
+    // TODO : Return the list of Nodes that forms the SCM HA.
+    return getRegisteredResponse(
+        scmNodeManager.register(datanodeDetails), null);
+  }
+
+  /**
+   * Send a container report.
+   *
+   * @param reports - container report.
+   * @return ContainerReportsResponseProto
+   * @throws IOException
+   */
+  @Override
+  public ContainerReportsResponseProto sendContainerReport(
+      ContainerReportsRequestProto reports) throws IOException {
+    updateContainerReportMetrics(reports);
+
+    // should we process container reports async?
+    scmContainerManager.processContainerReports(reports);
+    return ContainerReportsResponseProto.newBuilder().build();
+  }
+
+  private void updateContainerReportMetrics(
+      ContainerReportsRequestProto reports) {
+    ContainerStat newStat = null;
+    // TODO: We should update the logic once incremental container report
+    // type is supported.
+    if (reports
+        .getType() == ContainerReportsRequestProto.reportType.fullReport) {
+      newStat = new ContainerStat();
+      for (StorageContainerDatanodeProtocolProtos.ContainerInfo info : reports
+          .getReportsList()) {
+        newStat.add(new ContainerStat(info.getSize(), info.getUsed(),
+            info.getKeyCount(), info.getReadBytes(), info.getWriteBytes(),
+            info.getReadCount(), info.getWriteCount()));
+      }
+
+      // update container metrics
+      metrics.setLastContainerStat(newStat);
+    }
+
+    // Update container stat entry, this will trigger a removal operation if it
+    // exists in cache.
+    synchronized (containerReportCache) {
+      String datanodeUuid = reports.getDatanodeDetails().getUuid();
+      if (datanodeUuid != null && newStat != null) {
+        containerReportCache.put(datanodeUuid, newStat);
+        // update global view container metrics
+        metrics.incrContainerStat(newStat);
+      }
+    }
+  }
+
+  /**
+   * Handles the block deletion ACKs sent by datanodes. Once ACKs are
+   * received, SCM considers the blocks deleted and updates the metadata in
+   * the SCM DB.
+   *
+   * @param acks - block deletion ACKs from datanodes.
+   * @return ACK response.
+   * @throws IOException
+   */
+  @Override
+  public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK(
+      ContainerBlocksDeletionACKProto acks) throws IOException {
+    if (acks.getResultsCount() > 0) {
+      List<DeleteBlockTransactionResult> resultList = acks.getResultsList();
+      for (DeleteBlockTransactionResult result : resultList) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Got block deletion ACK from datanode, TXIDs={}, "
+                  + "success={}", result.getTxID(), result.getSuccess());
+        }
+        if (result.getSuccess()) {
+          LOG.debug("Purging TXID={} from block deletion log",
+              result.getTxID());
+          this.getScmBlockManager().getDeletedBlockLog()
+              .commitTransactions(Collections.singletonList(result.getTxID()));
+        } else {
+          LOG.warn("Got failed ACK for TXID={}, prepare to resend the "
+              + "TX in next interval", result.getTxID());
+        }
+      }
+    }
+    return ContainerBlocksDeletionACKResponseProto.newBuilder()
+        .getDefaultInstanceForType();
+  }
+
+  /**
+   * Returns the Number of Datanodes that are communicating with SCM.
+   *
+   * @param nodestate Healthy, Dead etc.
+   * @return int -- count
+   */
+  public int getNodeCount(NodeState nodestate) {
+    return scmNodeManager.getNodeCount(nodestate);
+  }
+
+  /**
+   * Returns SCM container manager.
+   */
+  @VisibleForTesting
+  public Mapping getScmContainerManager() {
+    return scmContainerManager;
+  }
+
+  /**
+   * Returns node manager.
+   * @return - Node Manager
+   */
+  @VisibleForTesting
+  public NodeManager getScmNodeManager() {
+    return scmNodeManager;
+  }
+
+  @VisibleForTesting
+  public BlockManager getScmBlockManager() {
+    return scmBlockManager;
+  }
+
+  /**
+   * Get block locations.
+   * @param keys batch of block keys to retrieve.
+   * @return set of allocated blocks.
+   * @throws IOException
+   */
+  @Override
+  public Set<AllocatedBlock> getBlockLocations(final Set<String> keys)
+      throws IOException {
+    Set<AllocatedBlock> locatedBlocks = new HashSet<>();
+    for (String key: keys) {
+      Pipeline pipeline = scmBlockManager.getBlock(key);
+      AllocatedBlock block = new AllocatedBlock.Builder()
+          .setKey(key)
+          .setPipeline(pipeline).build();
+      locatedBlocks.add(block);
+    }
+    return locatedBlocks;
+  }
+
+  /**
+   * Asks SCM where a block should be allocated. SCM responds with the set of
+   * datanodes that should be used for creating this block.
+   *
+   * @param size - size of the block.
+   * @param type - replication type.
+   * @param factor - replication factor.
+   * @return allocated block accessing info (key, pipeline).
+   * @throws IOException
+   */
+  @Override
+  public AllocatedBlock allocateBlock(long size,
+      HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor,
+      String owner) throws IOException {
+    return scmBlockManager.allocateBlock(size, type, factor, owner);
+  }
+
+  /**
+   * Get the clusterId and SCM Id from the version file in SCM.
+   */
+  @Override
+  public ScmInfo getScmInfo() throws IOException {
+    ScmInfo.Builder builder = new ScmInfo.Builder()
+        .setClusterId(scmStorage.getClusterID())
+        .setScmId(scmStorage.getScmId());
+    return builder.build();
+  }
+  /**
+   * Delete blocks for a set of object keys.
+   *
+   * @param keyBlocksInfoList list of block keys with object keys to delete.
+   * @return deletion results.
+   */
+  public List<DeleteBlockGroupResult> deleteKeyBlocks(
+      List<BlockGroup> keyBlocksInfoList) throws IOException {
+    LOG.info("SCM is informed by KSM to delete {} blocks",
+        keyBlocksInfoList.size());
+    List<DeleteBlockGroupResult> results = new ArrayList<>();
+    for (BlockGroup keyBlocks : keyBlocksInfoList) {
+      Result resultCode;
+      try {
+        // We delete blocks in an atomic operation to prevent getting
+        // into a state where only some of the blocks are deleted,
+        // which would leave the key in an inconsistent state.
+        scmBlockManager.deleteBlocks(keyBlocks.getBlockIDList());
+        resultCode = Result.success;
+      } catch (SCMException scmEx) {
+        LOG.warn("Fail to delete block: {}", keyBlocks.getGroupID(), scmEx);
+        switch (scmEx.getResult()) {
+        case CHILL_MODE_EXCEPTION:
+          resultCode = Result.chillMode;
+          break;
+        case FAILED_TO_FIND_BLOCK:
+          resultCode = Result.errorNotFound;
+          break;
+        default:
+          resultCode = Result.unknownFailure;
+        }
+      } catch (IOException ex) {
+        LOG.warn("Fail to delete blocks for object key: {}",
+            keyBlocks.getGroupID(), ex);
+        resultCode = Result.unknownFailure;
+      }
+      List<DeleteBlockResult> blockResultList = new ArrayList<>();
+      for (String blockKey : keyBlocks.getBlockIDList()) {
+        blockResultList.add(new DeleteBlockResult(blockKey, resultCode));
+      }
+      results.add(new DeleteBlockGroupResult(keyBlocks.getGroupID(),
+          blockResultList));
+    }
+    return results;
+  }
+
+  @VisibleForTesting
+  public String getPpcRemoteUsername() {
+    UserGroupInformation user = ProtobufRpcEngine.Server.getRemoteUser();
+    return user == null ? null : user.getUserName();
+  }
+
+  private void checkAdminAccess() throws IOException {
+    String remoteUser = getPpcRemoteUsername();
+    if(remoteUser != null) {
+      if (!scmAdminUsernames.contains(remoteUser)) {
+        throw new IOException(
+            "Access denied for user " + remoteUser
+                + ". Superuser privilege is required.");
+      }
+    }
+  }
+
+  /**
+   * Initialize SCM metrics.
+   */
+  public static void initMetrics() {
+    metrics = SCMMetrics.create();
+  }
+
+  /**
+   * Return SCM metrics instance.
+   */
+  public static SCMMetrics getMetrics() {
+    return metrics == null ? SCMMetrics.create() : metrics;
+  }
+
+  /**
+   * Invalidates the container stat entry for the given datanode.
+   *
+   * @param datanodeUuid - datanode UUID.
+   */
+  public void removeContainerReport(String datanodeUuid) {
+    synchronized (containerReportCache) {
+      containerReportCache.invalidate(datanodeUuid);
+    }
+  }
+
+  /**
+   * Gets the container stat of the specified datanode.
+   *
+   * @param datanodeUuid - datanode UUID.
+   * @return cached container stat, or null if none is present.
+   */
+  public ContainerStat getContainerReport(String datanodeUuid) {
+    ContainerStat stat = null;
+    synchronized (containerReportCache) {
+      stat = containerReportCache.getIfPresent(datanodeUuid);
+    }
+
+    return stat;
+  }
+
+  /**
+   * Returns a view of the container stat entries. Modifications made to the
+   * map will directly affect the cache.
+   *
+   * @return map view of the container report cache.
+   */
+  public ConcurrentMap<String, ContainerStat> getContainerReportCache() {
+    return containerReportCache.asMap();
+  }
+
+  @Override
+  public Map<String, String> getContainerReport() {
+    Map<String, String> id2StatMap = new HashMap<>();
+    synchronized (containerReportCache) {
+      ConcurrentMap<String, ContainerStat> map = containerReportCache.asMap();
+      for (Map.Entry<String, ContainerStat> entry : map.entrySet()) {
+        id2StatMap.put(entry.getKey(), entry.getValue().toJsonString());
+      }
+    }
+
+    return id2StatMap;
+  }
+}
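As a usage illustration (not part of the patch), the snippet below mirrors the main()/createSCM() start-up path and the AND semantics of queryNode() documented above. The empty argument array corresponds to a regular start-up, and the HEALTHY value is assumed to exist in HddsProtos.NodeState.

    OzoneConfiguration conf = new OzoneConfiguration();
    StorageContainerManager scm =
        StorageContainerManager.createSCM(new String[0], conf); // REGULAR mode
    scm.start();

    // queryNode() intersects the requested states; with a single state this
    // simply returns every datanode the node manager reports as HEALTHY.
    List<DatanodeDetails> healthy =
        scm.queryNode(EnumSet.of(HddsProtos.NodeState.HEALTHY));

    scm.stop();
    scm.join();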
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java
new file mode 100644
index 0000000..1ca059c
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/StorageContainerManagerHttpServer.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.server.BaseHttpServer;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+
+import java.io.IOException;
+
+/**
+ * HttpServer2 wrapper for the Ozone Storage Container Manager.
+ */
+public class StorageContainerManagerHttpServer extends BaseHttpServer {
+
+  public StorageContainerManagerHttpServer(Configuration conf)
+      throws IOException {
+    super(conf, "scm");
+  }
+
+  @Override protected String getHttpAddressKey() {
+    return ScmConfigKeys.OZONE_SCM_HTTP_ADDRESS_KEY;
+  }
+
+  @Override protected String getHttpBindHostKey() {
+    return ScmConfigKeys.OZONE_SCM_HTTP_BIND_HOST_KEY;
+  }
+
+  @Override protected String getHttpsAddressKey() {
+    return ScmConfigKeys.OZONE_SCM_HTTPS_ADDRESS_KEY;
+  }
+
+  @Override protected String getHttpsBindHostKey() {
+    return ScmConfigKeys.OZONE_SCM_HTTPS_BIND_HOST_KEY;
+  }
+
+  @Override protected String getBindHostDefault() {
+    return ScmConfigKeys.OZONE_SCM_HTTP_BIND_HOST_DEFAULT;
+  }
+
+  @Override protected int getHttpBindPortDefault() {
+    return ScmConfigKeys.OZONE_SCM_HTTP_BIND_PORT_DEFAULT;
+  }
+
+  @Override protected int getHttpsBindPortDefault() {
+    return ScmConfigKeys.OZONE_SCM_HTTPS_BIND_PORT_DEFAULT;
+  }
+
+  @Override protected String getKeytabFile() {
+    return ScmConfigKeys.OZONE_SCM_KEYTAB_FILE;
+  }
+
+  @Override protected String getSpnegoPrincipal() {
+    return OzoneConfigKeys.OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
+  }
+
+  @Override protected String getEnabledKey() {
+    return ScmConfigKeys.OZONE_SCM_HTTP_ENABLED_KEY;
+  }
+
+}
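This wrapper is instantiated and started by StorageContainerManager above; the hedged sketch below shows that wiring, with the bind address and port assumed to come from the ScmConfigKeys returned by the overrides in this class.

    // Illustrative only; mirrors the SCM constructor and start()/stop() above.
    StorageContainerManagerHttpServer httpServer =
        new StorageContainerManagerHttpServer(conf);
    httpServer.start();   // binds to the HTTP/HTTPS address resolved by BaseHttpServer
    // ...
    httpServer.stop();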
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java
new file mode 100644
index 0000000..4ab2516
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManager.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.block;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Block APIs.
+ * Containers are transparent to these APIs.
+ */
+public interface BlockManager extends Closeable {
+  /**
+   * Allocates a new block for a given size.
+   * @param size - Block Size
+   * @param type Replication Type
+   * @param factor - Replication Factor
+   * @return AllocatedBlock
+   * @throws IOException
+   */
+  AllocatedBlock allocateBlock(long size, HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, String owner) throws IOException;
+
+  /**
+   * Given the key of a block, returns the pipeline info.
+   * @param key - key of the block.
+   * @return - Pipeline used to access the block.
+   * @throws IOException
+   */
+  Pipeline getBlock(String key) throws IOException;
+
+  /**
+   * Deletes a list of blocks in an atomic operation. Internally, SCM
+   * writes these blocks into a {@link DeletedBlockLog} and deletes them
+   * from the SCM DB. If this is successful, the given blocks enter the
+   * pending deletion state and become invisible in the SCM namespace.
+   *
+   * @param blockIDs block IDs. This is often the list of blocks of
+   *                 a particular object key.
+   * @throws IOException if an exception happens; none of the blocks is deleted.
+   */
+  void deleteBlocks(List<String> blockIDs) throws IOException;
+
+  /**
+   * @return the block deletion transaction log maintained by SCM.
+   */
+  DeletedBlockLog getDeletedBlockLog();
+
+  /**
+   * Start block manager background services.
+   * @throws IOException
+   */
+  void start() throws IOException;
+
+  /**
+   * Shutdown block manager background services.
+   * @throws IOException
+   */
+  void stop() throws IOException;
+
+  /**
+   * @return the block deleting service executed in SCM.
+   */
+  SCMBlockDeletingService getSCMBlockDeletingService();
+}
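To make the contract concrete, here is a minimal sketch of how a caller might exercise this interface, mirroring the delegation in StorageContainerManager above. The replication settings, owner string, and the AllocatedBlock.getKey() accessor are assumptions for illustration.

    static void blockManagerSketch(BlockManager blockManager) throws IOException {
      AllocatedBlock block = blockManager.allocateBlock(
          128L * 1024 * 1024,                      // requested block size in bytes
          HddsProtos.ReplicationType.STAND_ALONE,  // assumed replication type value
          HddsProtos.ReplicationFactor.ONE,        // assumed replication factor value
          "ozone");                                // owner
      Pipeline pipeline = blockManager.getBlock(block.getKey()); // key -> pipeline
      blockManager.deleteBlocks(
          Collections.singletonList(block.getKey()));            // atomic delete
    }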
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
new file mode 100644
index 0000000..d966112
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
@@ -0,0 +1,530 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.Mapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .CHILL_MODE_EXCEPTION;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_FIND_BLOCK;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .INVALID_BLOCK_SIZE;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConsts.BLOCK_DB;
+
+/** Block Manager manages the block access for SCM. */
+public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BlockManagerImpl.class);
+  // TODO : FIX ME : Hard coding the owner.
+  // Currently the only user of the block service is Ozone; CBlock manages
+  // blocks by itself and does not rely on the block service offered by SCM.
+
+  private final NodeManager nodeManager;
+  private final Mapping containerManager;
+  private final MetadataStore blockStore;
+
+  private final Lock lock;
+  private final long containerSize;
+  private final long cacheSize;
+
+  private final DeletedBlockLog deletedBlockLog;
+  private final SCMBlockDeletingService blockDeletingService;
+
+  private final int containerProvisionBatchSize;
+  private final Random rand;
+  private ObjectName mxBean;
+
+  /**
+   * Constructor.
+   *
+   * @param conf - configuration.
+   * @param nodeManager - node manager.
+   * @param containerManager - container manager.
+   * @param cacheSizeMB - cache size for level db store.
+   * @throws IOException
+   */
+  public BlockManagerImpl(final Configuration conf,
+      final NodeManager nodeManager, final Mapping containerManager,
+      final int cacheSizeMB) throws IOException {
+    this.nodeManager = nodeManager;
+    this.containerManager = containerManager;
+    this.cacheSize = cacheSizeMB;
+
+    this.containerSize = OzoneConsts.GB * conf.getInt(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB,
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT);
+    File metaDir = getOzoneMetaDirPath(conf);
+    String scmMetaDataDir = metaDir.getPath();
+
+    // Write the block key to container name mapping.
+    File blockContainerDbPath = new File(scmMetaDataDir, BLOCK_DB);
+    blockStore =
+        MetadataStoreBuilder.newBuilder()
+            .setConf(conf)
+            .setDbFile(blockContainerDbPath)
+            .setCacheSize(this.cacheSize * OzoneConsts.MB)
+            .build();
+
+    this.containerProvisionBatchSize =
+        conf.getInt(
+            ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE,
+            ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE_DEFAULT);
+    rand = new Random();
+    this.lock = new ReentrantLock();
+
+    mxBean = MBeans.register("BlockManager", "BlockManagerImpl", this);
+
+    // SCM block deleting transaction log and deleting service.
+    deletedBlockLog = new DeletedBlockLogImpl(conf);
+    long svcInterval =
+        conf.getTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+            OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT,
+            TimeUnit.MILLISECONDS);
+    long serviceTimeout =
+        conf.getTimeDuration(
+            OZONE_BLOCK_DELETING_SERVICE_TIMEOUT,
+            OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT,
+            TimeUnit.MILLISECONDS);
+    blockDeletingService =
+        new SCMBlockDeletingService(
+            deletedBlockLog, containerManager, nodeManager, svcInterval,
+            serviceTimeout, conf);
+  }
+
+  /**
+   * Start block manager services.
+   *
+   * @throws IOException
+   */
+  public void start() throws IOException {
+    this.blockDeletingService.start();
+  }
+
+  /**
+   * Shutdown block manager services.
+   *
+   * @throws IOException
+   */
+  public void stop() throws IOException {
+    this.blockDeletingService.shutdown();
+    this.close();
+  }
+
+  /**
+   * Pre-allocates the specified count of containers for block creation.
+   *
+   * @param count - Number of containers to allocate.
+   * @param type - Type of containers.
+   * @param factor - How many copies are needed for each container.
+   * @param owner - Name of the service that owns these containers.
+   * @throws IOException
+   */
+  private void preAllocateContainers(int count, ReplicationType type,
+      ReplicationFactor factor, String owner)
+      throws IOException {
+    lock.lock();
+    try {
+      for (int i = 0; i < count; i++) {
+        String containerName = UUID.randomUUID().toString();
+        ContainerInfo containerInfo = null;
+        try {
+          // TODO: Fix this later when Ratis is made the Default.
+          containerInfo = containerManager.allocateContainer(type, factor,
+              containerName, owner);
+
+          if (containerInfo == null) {
+            LOG.warn("Unable to allocate container.");
+            continue;
+          }
+        } catch (IOException ex) {
+          LOG.warn("Unable to allocate container: {}", ex);
+          continue;
+        }
+      }
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Allocates a block in a container and returns that info.
+   *
+   * @param size - Block size.
+   * @param type - Replication type.
+   * @param factor - Replication factor.
+   * @param owner - Name of the service that owns the block.
+   * @return Allocated block
+   * @throws IOException on failure.
+   */
+  @Override
+  public AllocatedBlock allocateBlock(final long size,
+      ReplicationType type, ReplicationFactor factor, String owner)
+      throws IOException {
+    LOG.trace("Size;{} , type : {}, factor : {} ", size, type, factor);
+
+    if (size < 0 || size > containerSize) {
+      LOG.warn("Invalid block size requested : {}", size);
+      throw new SCMException("Unsupported block size: " + size,
+          INVALID_BLOCK_SIZE);
+    }
+
+    if (!nodeManager.isOutOfChillMode()) {
+      LOG.warn("Not out of Chill mode.");
+      throw new SCMException("Unable to create block while in chill mode",
+          CHILL_MODE_EXCEPTION);
+    }
+
+    lock.lock();
+    try {
+      /*
+       * Here is the high-level logic.
+       *
+       * 1. First we check whether there are containers in the ALLOCATED
+       *    state; that is, SCM has allocated them in the SCM namespace but
+       *    the corresponding containers have not been created on the
+       *    datanodes yet. If we have any in that state, we return one to the
+       *    client, which allows the client to finish creating those
+       *    containers. This is a sort of greedy algorithm; our primary
+       *    purpose is to get as many containers created as possible.
+       *
+       * 2. If there are no ALLOCATED containers, we find an OPEN container
+       *    that matches the criteria.
+       *
+       * 3. If both of those fail, we pre-allocate a bunch of containers in
+       *    SCM and try again.
+       *
+       * TODO : Support random picking of two containers from the list, so we
+       * can use different kinds of policies.
+       */
+
+      ContainerInfo containerInfo;
+
+      // Look for ALLOCATED container that matches all other parameters.
+      containerInfo =
+          containerManager
+              .getStateManager()
+              .getMatchingContainer(
+                  size, owner, type, factor, HddsProtos.LifeCycleState
+                      .ALLOCATED);
+      if (containerInfo != null) {
+        containerManager.updateContainerState(containerInfo.getContainerName(),
+            HddsProtos.LifeCycleEvent.CREATE);
+        return newBlock(containerInfo, HddsProtos.LifeCycleState.ALLOCATED);
+      }
+
+      // Since we found no allocated containers that match our criteria, let us
+      // look for OPEN containers that match the criteria.
+      containerInfo =
+          containerManager
+              .getStateManager()
+              .getMatchingContainer(size, owner, type, factor, HddsProtos
+                  .LifeCycleState.OPEN);
+      if (containerInfo != null) {
+        return newBlock(containerInfo, HddsProtos.LifeCycleState.OPEN);
+      }
+
+      // We found neither ALLOCATED nor OPEN containers. This generally means
+      // that most of our containers are full, or we have not yet allocated
+      // containers of the requested type and replication factor. So let us go
+      // and allocate some.
+      preAllocateContainers(containerProvisionBatchSize, type, factor, owner);
+
+      // Since we just allocated a set of containers, this should work.
+      containerInfo =
+          containerManager
+              .getStateManager()
+              .getMatchingContainer(
+                  size, owner, type, factor, HddsProtos.LifeCycleState
+                      .ALLOCATED);
+      if (containerInfo != null) {
+        containerManager.updateContainerState(containerInfo.getContainerName(),
+            HddsProtos.LifeCycleEvent.CREATE);
+        return newBlock(containerInfo, HddsProtos.LifeCycleState.ALLOCATED);
+      }
+
+      // We have tried all the strategies we know, but somehow we are not able
+      // to get a container for this block. Log that info and return null.
+      LOG.error(
+          "Unable to allocate a block for the size: {}, type: {}, " +
+              "factor: {}",
+          size,
+          type,
+          factor);
+      return null;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * newBlock - returns a new block assigned to a container.
+   *
+   * @param containerInfo - Container Info.
+   * @param state - Current state of the container.
+   * @return AllocatedBlock
+   */
+  private AllocatedBlock newBlock(
+      ContainerInfo containerInfo, HddsProtos.LifeCycleState state)
+      throws IOException {
+
+    // TODO : Replace this with Block ID.
+    String blockKey = UUID.randomUUID().toString();
+    boolean createContainer = (state == HddsProtos.LifeCycleState.ALLOCATED);
+
+    AllocatedBlock.Builder abb =
+        new AllocatedBlock.Builder()
+            .setKey(blockKey)
+            // TODO : Use containerinfo instead of pipeline.
+            .setPipeline(containerInfo.getPipeline())
+            .setShouldCreateContainer(createContainer);
+    LOG.trace("New block allocated : {} Container ID: {}", blockKey,
+        containerInfo.toString());
+
+    if (containerInfo.getPipeline().getMachines().size() == 0) {
+      LOG.error("Pipeline Machine count is zero.");
+      return null;
+    }
+
+    // Persist this block info to the blockStore DB, so getBlock(key) can
+    // find which container the block lives in.
+    // TODO : Remove this DB in the future and make this a KSM operation.
+    // Category: SCALABILITY.
+    if (containerInfo.getPipeline().getMachines().size() > 0) {
+      blockStore.put(
+          DFSUtil.string2Bytes(blockKey),
+          DFSUtil.string2Bytes(containerInfo.getPipeline().getContainerName()));
+    }
+    return abb.build();
+  }
+
+  /**
+   * Given a block key, return the Pipeline information.
+   *
+   * @param key - block key assigned by SCM.
+   * @return Pipeline (list of DNs and leader) to access the block.
+   * @throws IOException
+   */
+  @Override
+  public Pipeline getBlock(final String key) throws IOException {
+    lock.lock();
+    try {
+      byte[] containerBytes = blockStore.get(DFSUtil.string2Bytes(key));
+      if (containerBytes == null) {
+        throw new SCMException(
+            "Specified block key does not exist. key : " + key,
+            FAILED_TO_FIND_BLOCK);
+      }
+
+      String containerName = DFSUtil.bytes2String(containerBytes);
+      ContainerInfo containerInfo = containerManager.getContainer(
+          containerName);
+      if (containerInfo == null) {
+        LOG.debug("Container {} allocated by block service"
+            + "can't be found in SCM", containerName);
+        throw new SCMException(
+            "Unable to find container for the block",
+            SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER);
+      }
+      return containerInfo.getPipeline();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Deletes a list of blocks in an atomic operation. Internally, SCM writes
+   * these blocks into a {@link DeletedBlockLog} and deletes them from the
+   * SCM DB. If this is successful, the given blocks enter the
+   * pending-deletion state and become invisible in the SCM namespace.
+   *
+   * @param blockIDs block IDs. This is often the list of blocks of a
+   * particular object key.
+   * @throws IOException if an exception happens, none of the blocks is
+   * deleted.
+   */
+  @Override
+  public void deleteBlocks(List<String> blockIDs) throws IOException {
+    if (!nodeManager.isOutOfChillMode()) {
+      throw new SCMException("Unable to delete block while in chill mode",
+          CHILL_MODE_EXCEPTION);
+    }
+
+    lock.lock();
+    LOG.info("Deleting blocks {}", String.join(",", blockIDs));
+    Map<String, List<String>> containerBlocks = new HashMap<>();
+    BatchOperation batch = new BatchOperation();
+    BatchOperation rollbackBatch = new BatchOperation();
+    // TODO: track the block size info so that we can reclaim the container
+    // TODO: used space when the block is deleted.
+    try {
+      for (String blockKey : blockIDs) {
+        byte[] blockKeyBytes = DFSUtil.string2Bytes(blockKey);
+        byte[] containerBytes = blockStore.get(blockKeyBytes);
+        if (containerBytes == null) {
+          throw new SCMException(
+              "Specified block key does not exist. key : " + blockKey,
+              FAILED_TO_FIND_BLOCK);
+        }
+        batch.delete(blockKeyBytes);
+        rollbackBatch.put(blockKeyBytes, containerBytes);
+
+        // Merge blocks to a container to blocks mapping,
+        // prepare to persist this info to the deletedBlocksLog.
+        String containerName = DFSUtil.bytes2String(containerBytes);
+        if (containerBlocks.containsKey(containerName)) {
+          containerBlocks.get(containerName).add(blockKey);
+        } else {
+          List<String> item = new ArrayList<>();
+          item.add(blockKey);
+          containerBlocks.put(containerName, item);
+        }
+      }
+
+      // We update the SCM DB first, so if this step fails we end up here:
+      // nothing gets into the delLog, so no blocks will be accidentally
+      // removed. If we wrote the log first, then once the log is written the
+      // async deleting service would start to scan and might pick up some
+      // blocks for real deletion, which could cause data loss.
+      blockStore.writeBatch(batch);
+      try {
+        deletedBlockLog.addTransactions(containerBlocks);
+      } catch (IOException e) {
+        try {
+          // If the delLog update failed, we need to roll back the changes.
+          blockStore.writeBatch(rollbackBatch);
+        } catch (IOException rollbackException) {
+          // This is a corner case: addTransactions fails and the rollback
+          // also fails, which leaves these blocks in an inconsistent state.
+          // They were moved to the pending-deletion state in the SCM DB but
+          // were not written into the delLog, so real deletions will not be
+          // done. The blocks become invisible in the namespace but the actual
+          // data is not removed. We log an error here so an admin can
+          // manually check and fix such errors.
+          LOG.error(
+              "Blocks might be in inconsistent state because"
+                  + " they were moved to pending deletion state in SCM DB but"
+                  + " not written into delLog. Admin can manually add them"
+                  + " into delLog for deletions. Inconsistent block list: {}",
+              String.join(",", blockIDs),
+              e);
+          throw rollbackException;
+        }
+        throw new IOException(
+            "Skip writing the deleted blocks info to"
+                + " the delLog because addTransaction fails. Batch skipped: "
+                + String.join(",", blockIDs),
+            e);
+      }
+      // TODO: Container report handling of the deleted blocks:
+      // Remove tombstone and update open container usage.
+      // We will revisit this when the closed container replication is done.
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  @Override
+  public DeletedBlockLog getDeletedBlockLog() {
+    return this.deletedBlockLog;
+  }
+
+  @VisibleForTesting
+  public String getDeletedKeyName(String key) {
+    return StringUtils.format(".Deleted/%s", key);
+  }
+
+  /**
+   * Close the resources for BlockManager.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void close() throws IOException {
+    if (blockStore != null) {
+      blockStore.close();
+    }
+    if (deletedBlockLog != null) {
+      deletedBlockLog.close();
+    }
+    blockDeletingService.shutdown();
+    if (mxBean != null) {
+      MBeans.unregister(mxBean);
+      mxBean = null;
+    }
+  }
+
+  @Override
+  public int getOpenContainersNo() {
+    return 0;
+    // TODO : FIX ME : The open container count being a single number does
+    // not make sense. We have to get open containers by replication type and
+    // replication factor. Hence returning 0 for now.
+    // containers.get(HddsProtos.LifeCycleState.OPEN).size();
+  }
+
+  @Override
+  public SCMBlockDeletingService getSCMBlockDeletingService() {
+    return this.blockDeletingService;
+  }
+}
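A minimal usage sketch of the BlockManager API above, assuming an embedding SCM that already provides the conf, nodeManager and containerMapping instances; the 128 MB cache size, the "OZONE" owner string, and the getKey() accessor on AllocatedBlock (mirroring the builder's setKey) are illustrative assumptions, not values taken from this patch:

    // Hypothetical wiring; nodeManager and containerMapping are assumed to be
    // created by the SCM that embeds the block manager.
    BlockManager blockManager =
        new BlockManagerImpl(conf, nodeManager, containerMapping, 128);
    blockManager.start();

    // Allocate a 1 MB block on a three-way replicated RATIS container.
    AllocatedBlock block = blockManager.allocateBlock(
        1024 * 1024, ReplicationType.RATIS, ReplicationFactor.THREE, "OZONE");
    if (block != null) {
      // Resolve the pipeline for the block key later, e.g. on read.
      Pipeline pipeline = blockManager.getBlock(block.getKey());
      // Mark the block for deletion; it moves into the deleted block log.
      blockManager.deleteBlocks(Collections.singletonList(block.getKey()));
    }
    blockManager.stop();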
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java
new file mode 100644
index 0000000..23c6983
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockmanagerMXBean.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.block;
+
+
+/**
+ * JMX interface for the block manager.
+ */
+public interface BlockmanagerMXBean {
+
+  /**
+   * Number of open containers managed by the block manager.
+   */
+  int getOpenContainersNo();
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java
new file mode 100644
index 0000000..47074d2
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DatanodeDeletedBlockTransactions.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import com.google.common.collect.ArrayListMultimap;
+import org.apache.hadoop.hdds.scm.container.Mapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+/**
+ * A wrapper class that holds info about a datanode and all deleted-block
+ * transactions that will be sent to that datanode.
+ */
+public class DatanodeDeletedBlockTransactions {
+  private int nodeNum;
+  // The throttle size for each datanode.
+  private int maximumAllowedTXNum;
+  // Current counter of inserted TX.
+  private int currentTXNum;
+  private Mapping mappingService;
+  // A list of TXs mapped to a certain datanode ID.
+  private final ArrayListMultimap<UUID, DeletedBlocksTransaction>
+      transactions;
+
+  DatanodeDeletedBlockTransactions(Mapping mappingService,
+      int maximumAllowedTXNum, int nodeNum) {
+    this.transactions = ArrayListMultimap.create();
+    this.mappingService = mappingService;
+    this.maximumAllowedTXNum = maximumAllowedTXNum;
+    this.nodeNum = nodeNum;
+  }
+
+  public void addTransaction(DeletedBlocksTransaction tx) throws IOException {
+    ContainerInfo info = null;
+    try {
+      info = mappingService.getContainer(tx.getContainerName());
+    } catch (IOException e) {
+      SCMBlockDeletingService.LOG.warn("Got container info error.", e);
+    }
+
+    if (info == null) {
+      SCMBlockDeletingService.LOG.warn(
+          "Container {} not found, continue to process next",
+          tx.getContainerName());
+      return;
+    }
+
+    for (DatanodeDetails dd : info.getPipeline().getMachines()) {
+      UUID dnID = dd.getUuid();
+      if (transactions.containsKey(dnID)) {
+        List<DeletedBlocksTransaction> txs = transactions.get(dnID);
+        if (txs != null && txs.size() < maximumAllowedTXNum) {
+          boolean hasContained = false;
+          for (DeletedBlocksTransaction t : txs) {
+            if (t.getContainerName().equals(tx.getContainerName())) {
+              hasContained = true;
+              break;
+            }
+          }
+
+          if (!hasContained) {
+            txs.add(tx);
+            currentTXNum++;
+          }
+        }
+      } else {
+        currentTXNum++;
+        transactions.put(dnID, tx);
+      }
+      SCMBlockDeletingService.LOG.debug("Transaction added: {} <- TX({})", dnID,
+          tx.getTxID());
+    }
+  }
+
+  Set<UUID> getDatanodeIDs() {
+    return transactions.keySet();
+  }
+
+  boolean isEmpty() {
+    return transactions.isEmpty();
+  }
+
+  boolean hasTransactions(UUID dnId) {
+    return transactions.containsKey(dnId) &&
+        !transactions.get(dnId).isEmpty();
+  }
+
+  List<DeletedBlocksTransaction> getDatanodeTransactions(UUID dnId) {
+    return transactions.get(dnId);
+  }
+
+  List<String> getTransactionIDList(UUID dnId) {
+    if (hasTransactions(dnId)) {
+      return transactions.get(dnId).stream()
+          .map(DeletedBlocksTransaction::getTxID).map(String::valueOf)
+          .collect(Collectors.toList());
+    } else {
+      return Collections.emptyList();
+    }
+  }
+
+  boolean isFull() {
+    return currentTXNum >= maximumAllowedTXNum * nodeNum;
+  }
+
+  int getTXNum() {
+    return currentTXNum;
+  }
+}
\ No newline at end of file
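A small arithmetic sketch of the per-datanode throttle above, with assumed values; the real numbers come from configuration and from the healthy-node count at scan time:

    // Assumed: a datanode container limit of 10 per interval, doubled by
    // SCMBlockDeletingService, and 5 healthy datanodes in this round.
    int maximumAllowedTXNum = 10 * 2;
    int nodeNum = 5;
    int currentTXNum = 100;  // TXs buffered across all datanodes so far
    // isFull() stops the delLog scan once the aggregate cap is reached.
    boolean full = currentTXNum >= maximumAllowedTXNum * nodeNum;  // true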
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java
new file mode 100644
index 0000000..f7b770e
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLog.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The DeletedBlockLog is a persisted log in SCM that keeps track of
+ * container blocks which are under deletion. It maintains info
+ * about under-deletion container blocks reported by KSM,
+ * and the state of how they are being processed.
+ */
+public interface DeletedBlockLog extends Closeable {
+
+  /**
+   * Returns a size-limited list of transactions. Note that count is the
+   * maximum number of TXs to return; we might not always be able to return
+   * that many. The retry count of the returned transactions is in
+   * [0, MAX_RETRY).
+   *
+   * @param count - number of transactions.
+   * @return a list of BlockDeletionTransaction.
+   */
+  List<DeletedBlocksTransaction> getTransactions(int count)
+      throws IOException;
+
+  /**
+   * Scans the entire log once and adds TXs to DatanodeDeletedBlockTransactions.
+   * Once the DatanodeDeletedBlockTransactions is full, the scan
+   * stops.
+   * @param transactions the collection that the TXs will be added into.
+   * @throws IOException
+   */
+  void getTransactions(DatanodeDeletedBlockTransactions transactions)
+      throws IOException;
+
+  /**
+   * Returns all failed transactions in the log. A transaction is considered
+   * failed once it has been retried more than the MAX_RETRY limit and its
+   * count has been reset to -1.
+   *
+   * @return a list of failed deleted block transactions.
+   * @throws IOException
+   */
+  List<DeletedBlocksTransaction> getFailedTransactions()
+      throws IOException;
+
+  /**
+   * Increments the count for the given list of transactions by 1.
+   * The log maintains a valid count range of [0, MAX_RETRY] for each
+   * transaction; if a count exceeds this range, it is reset to -1 to
+   * indicate that the transaction is no longer valid.
+   *
+   * @param txIDs - transaction IDs.
+   */
+  void incrementCount(List<Long> txIDs)
+      throws IOException;
+
+  /**
+   * Committing a transaction means deleting all footprints of that
+   * transaction from the log. This method doesn't guarantee that all
+   * transactions can be successfully deleted; it tolerates failures and
+   * makes a best effort.
+   *
+   * @param txIDs - transaction IDs.
+   */
+  void commitTransactions(List<Long> txIDs) throws IOException;
+
+  /**
+   * Creates a block deletion transaction and adds that into the log.
+   *
+   * @param containerName - container name.
+   * @param blocks - blocks that belong to the same container.
+   *
+   * @throws IOException
+   */
+  void addTransaction(String containerName, List<String> blocks)
+      throws IOException;
+
+  /**
+   * Creates block deletion transactions for a set of containers, adds them
+   * to the log and persists them atomically. An object key might be stored
+   * across multiple containers and multiple blocks, and this API ensures
+   * that these updates are done atomically, so if any of them fails the
+   * entire operation fails without any updates to the log. Note that this
+   * doesn't mean only one transaction is created; multiple transactions
+   * (depending on the number of containers) are created together (on
+   * success) or not at all (on failure).
+   *
+   * @param containerBlocksMap a map of containerBlocks.
+   * @throws IOException
+   */
+  void addTransactions(Map<String, List<String>> containerBlocksMap)
+      throws IOException;
+
+  /**
+   * Returns the total number of valid transactions. A transaction is
+   * considered to be valid as long as its count is in range [0, MAX_RETRY].
+   *
+   * @return number of valid transactions.
+   * @throws IOException
+   */
+  int getNumOfValidTransactions() throws IOException;
+}
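An illustrative lifecycle of a deleted-block transaction against the interface above, assuming an already-open DeletedBlockLog instance and hypothetical container and block names:

    // Persist one transaction per container, atomically.
    Map<String, List<String>> containerBlocks = new HashMap<>();
    containerBlocks.put("container-1", Arrays.asList("blk-1", "blk-2"));
    deletedBlockLog.addTransactions(containerBlocks);

    // Fetch a throttled batch of valid transactions to send to datanodes.
    List<DeletedBlocksTransaction> batch = deletedBlockLog.getTransactions(50);
    List<Long> txIDs = batch.stream()
        .map(DeletedBlocksTransaction::getTxID)
        .collect(Collectors.toList());

    // A failed send bumps the retry count; exceeding MAX_RETRY parks the TX
    // at count -1 so getFailedTransactions() can surface it.
    deletedBlockLog.incrementCount(txIDs);

    // Once datanodes acknowledge the deletions, drop the TXs from the log.
    deletedBlockLog.commitTransactions(txIDs);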
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
new file mode 100644
index 0000000..0f4988a
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
@@ -0,0 +1,356 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+import com.google.common.primitives.Longs;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_BLOCK_DELETION_MAX_RETRY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_BLOCK_DELETION_MAX_RETRY_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_MB;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+import static org.apache.hadoop.ozone.OzoneConsts.DELETED_BLOCK_DB;
+
+/**
+ * An implementation of {@link DeletedBlockLog} that uses a K/V db to
+ * maintain block deletion transactions between SCM and the datanodes.
+ * This is a very basic implementation: it simply scans the log,
+ * memorizes the position where the last scan stopped, and uses that to
+ * determine where the next scan starts. It has no notion of the weight
+ * of each transaction, so as long as a transaction is still valid it has
+ * the same chance of being retrieved, which depends only on the natural
+ * order of the transaction IDs.
+ */
+public class DeletedBlockLogImpl implements DeletedBlockLog {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DeletedBlockLogImpl.class);
+
+  private static final byte[] LATEST_TXID =
+      DFSUtil.string2Bytes("#LATEST_TXID#");
+
+  private final int maxRetry;
+  private final MetadataStore deletedStore;
+  private final Lock lock;
+  // The latest id of deleted blocks in the db.
+  private long lastTxID;
+  private long lastReadTxID;
+
+  public DeletedBlockLogImpl(Configuration conf) throws IOException {
+    maxRetry = conf.getInt(OZONE_SCM_BLOCK_DELETION_MAX_RETRY,
+        OZONE_SCM_BLOCK_DELETION_MAX_RETRY_DEFAULT);
+
+    File metaDir = getOzoneMetaDirPath(conf);
+    String scmMetaDataDir = metaDir.getPath();
+    File deletedLogDbPath = new File(scmMetaDataDir, DELETED_BLOCK_DB);
+    int cacheSize = conf.getInt(OZONE_SCM_DB_CACHE_SIZE_MB,
+        OZONE_SCM_DB_CACHE_SIZE_DEFAULT);
+    // Load store of all transactions.
+    deletedStore = MetadataStoreBuilder.newBuilder()
+        .setCreateIfMissing(true)
+        .setConf(conf)
+        .setDbFile(deletedLogDbPath)
+        .setCacheSize(cacheSize * OzoneConsts.MB)
+        .build();
+
+    this.lock = new ReentrantLock();
+    // start from the head of deleted store.
+    lastReadTxID = 0;
+    lastTxID = findLatestTxIDInStore();
+  }
+
+  @VisibleForTesting
+  MetadataStore getDeletedStore() {
+    return deletedStore;
+  }
+
+  /**
+   * There is no need to lock before reading because
+   * this is only used in the constructor.
+   *
+   * @return latest txid.
+   * @throws IOException
+   */
+  private long findLatestTxIDInStore() throws IOException {
+    long txid = 0;
+    byte[] value = deletedStore.get(LATEST_TXID);
+    if (value != null) {
+      txid = Longs.fromByteArray(value);
+    }
+    return txid;
+  }
+
+  @Override
+  public List<DeletedBlocksTransaction> getTransactions(
+      int count) throws IOException {
+    List<DeletedBlocksTransaction> result = new ArrayList<>();
+    MetadataKeyFilter getNextTxID = (preKey, currentKey, nextKey)
+        -> Longs.fromByteArray(currentKey) > lastReadTxID;
+    MetadataKeyFilter avoidInvalidTxid = (preKey, currentKey, nextKey)
+        -> !Arrays.equals(LATEST_TXID, currentKey);
+    lock.lock();
+    try {
+      deletedStore.iterate(null, (key, value) -> {
+        if (getNextTxID.filterKey(null, key, null) &&
+            avoidInvalidTxid.filterKey(null, key, null)) {
+          DeletedBlocksTransaction block = DeletedBlocksTransaction
+              .parseFrom(value);
+          if (block.getCount() > -1 && block.getCount() <= maxRetry) {
+            result.add(block);
+          }
+        }
+        return result.size() < count;
+      });
+      // If we did not fill the batch, restart the next scan from the
+      // beginning of the metadata.
+      if (result.size() < count || result.size() < 1) {
+        lastReadTxID = 0;
+      } else {
+        lastReadTxID = result.get(result.size() - 1).getTxID();
+      }
+    } finally {
+      lock.unlock();
+    }
+    return result;
+  }
+
+  @Override
+  public List<DeletedBlocksTransaction> getFailedTransactions()
+      throws IOException {
+    lock.lock();
+    try {
+      final List<DeletedBlocksTransaction> failedTXs = Lists.newArrayList();
+      deletedStore.iterate(null, (key, value) -> {
+        if (!Arrays.equals(LATEST_TXID, key)) {
+          DeletedBlocksTransaction delTX =
+              DeletedBlocksTransaction.parseFrom(value);
+          if (delTX.getCount() == -1) {
+            failedTXs.add(delTX);
+          }
+        }
+        return true;
+      });
+      return failedTXs;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @param txIDs - transaction IDs.
+   * @throws IOException
+   */
+  @Override
+  public void incrementCount(List<Long> txIDs) throws IOException {
+    BatchOperation batch = new BatchOperation();
+    lock.lock();
+    try {
+      for(Long txID : txIDs) {
+        try {
+          DeletedBlocksTransaction block = DeletedBlocksTransaction
+              .parseFrom(deletedStore.get(Longs.toByteArray(txID)));
+          DeletedBlocksTransaction.Builder builder = block.toBuilder();
+          int currentCount = block.getCount();
+          if (currentCount > -1) {
+            builder.setCount(++currentCount);
+          }
+          // If the retry count exceeds the maxRetry value, set it to -1 to
+          // stop retrying; admins can analyze those blocks and purge them
+          // manually with SCMCli.
+          if (currentCount > maxRetry) {
+            builder.setCount(-1);
+          }
+          deletedStore.put(Longs.toByteArray(txID),
+              builder.build().toByteArray());
+        } catch (IOException ex) {
+          LOG.warn("Cannot increase count for txID " + txID, ex);
+        }
+      }
+      deletedStore.writeBatch(batch);
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  private DeletedBlocksTransaction constructNewTransaction(long txID,
+      String containerName, List<String> blocks) {
+    return DeletedBlocksTransaction.newBuilder()
+        .setTxID(txID)
+        .setContainerName(containerName)
+        .addAllBlockID(blocks)
+        .setCount(0)
+        .build();
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @param txIDs - transaction IDs.
+   * @throws IOException
+   */
+  @Override
+  public void commitTransactions(List<Long> txIDs) throws IOException {
+    lock.lock();
+    try {
+      for (Long txID : txIDs) {
+        try {
+          deletedStore.delete(Longs.toByteArray(txID));
+        } catch (IOException ex) {
+          LOG.warn("Cannot commit txID " + txID, ex);
+        }
+      }
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @param containerName - container name.
+   * @param blocks - blocks that belong to the same container.
+   * @throws IOException
+   */
+  @Override
+  public void addTransaction(String containerName, List<String> blocks)
+      throws IOException {
+    BatchOperation batch = new BatchOperation();
+    lock.lock();
+    try {
+      DeletedBlocksTransaction tx = constructNewTransaction(lastTxID + 1,
+          containerName, blocks);
+      byte[] key = Longs.toByteArray(lastTxID + 1);
+
+      batch.put(key, tx.toByteArray());
+      batch.put(LATEST_TXID, Longs.toByteArray(lastTxID + 1));
+
+      deletedStore.writeBatch(batch);
+      lastTxID += 1;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  @Override
+  public int getNumOfValidTransactions() throws IOException {
+    lock.lock();
+    try {
+      final AtomicInteger num = new AtomicInteger(0);
+      deletedStore.iterate(null, (key, value) -> {
+        // Exclude latest txid record
+        if (!Arrays.equals(LATEST_TXID, key)) {
+          DeletedBlocksTransaction delTX =
+              DeletedBlocksTransaction.parseFrom(value);
+          if (delTX.getCount() > -1) {
+            num.incrementAndGet();
+          }
+        }
+        return true;
+      });
+      return num.get();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @param containerBlocksMap a map of containerBlocks.
+   * @throws IOException
+   */
+  @Override
+  public void addTransactions(Map<String, List<String>> containerBlocksMap)
+      throws IOException {
+    BatchOperation batch = new BatchOperation();
+    lock.lock();
+    try {
+      long currentLatestID = lastTxID;
+      for (Map.Entry<String, List<String>> entry :
+          containerBlocksMap.entrySet()) {
+        currentLatestID += 1;
+        byte[] key = Longs.toByteArray(currentLatestID);
+        DeletedBlocksTransaction tx = constructNewTransaction(currentLatestID,
+            entry.getKey(), entry.getValue());
+        batch.put(key, tx.toByteArray());
+      }
+      lastTxID = currentLatestID;
+      batch.put(LATEST_TXID, Longs.toByteArray(lastTxID));
+      deletedStore.writeBatch(batch);
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (deletedStore != null) {
+      deletedStore.close();
+    }
+  }
+
+  @Override
+  public void getTransactions(DatanodeDeletedBlockTransactions transactions)
+      throws IOException {
+    lock.lock();
+    try {
+      deletedStore.iterate(null, (key, value) -> {
+        if (!Arrays.equals(LATEST_TXID, key)) {
+          DeletedBlocksTransaction block = DeletedBlocksTransaction
+              .parseFrom(value);
+
+          if (block.getCount() > -1 && block.getCount() <= maxRetry) {
+            transactions.addTransaction(block);
+          }
+          return !transactions.isFull();
+        }
+        return true;
+      });
+    } finally {
+      lock.unlock();
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
new file mode 100644
index 0000000..2c555e0
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.Mapping;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.BackgroundTask;
+import org.apache.hadoop.utils.BackgroundTaskQueue;
+import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT;
+
+/**
+ * A background service running in SCM to delete blocks. This service scans
+ * the block deletion log at a certain interval and caches block deletion
+ * commands in {@link org.apache.hadoop.hdds.scm.node.CommandQueue};
+ * asynchronously, the SCM HB thread polls the cached commands and sends them
+ * to the datanodes for physical processing.
+ */
+public class SCMBlockDeletingService extends BackgroundService {
+
+  static final Logger LOG =
+      LoggerFactory.getLogger(SCMBlockDeletingService.class);
+
+  // ThreadPoolSize=2, 1 for scheduler and the other for the scanner.
+  private final static int BLOCK_DELETING_SERVICE_CORE_POOL_SIZE = 2;
+  private final DeletedBlockLog deletedBlockLog;
+  private final Mapping mappingService;
+  private final NodeManager nodeManager;
+
+  // The block delete limit size is dynamically calculated from the container
+  // delete limit size (ozone.block.deleting.container.limit.per.interval)
+  // configured for the datanode. To ensure the DN does not wait for delete
+  // commands, we multiply this value by a factor of 2 and use that as the
+  // final TX limit size for each node.
+  // We currently implement a throttling algorithm that throttles delete
+  // blocks for each datanode. Each node is limited to the calculated size.
+  // First the current node info is fetched from the node manager, then the
+  // entire delLog is scanned from beginning to end. If a node has reached its
+  // maximum, its records are skipped; if not, scanning continues until it
+  // reaches the maximum. Once all nodes are full, the scan stops.
+  private int blockDeleteLimitSize;
+
+  public SCMBlockDeletingService(DeletedBlockLog deletedBlockLog,
+      Mapping mapper, NodeManager nodeManager,
+      long  interval, long serviceTimeout, Configuration conf) {
+    super("SCMBlockDeletingService", interval, TimeUnit.MILLISECONDS,
+        BLOCK_DELETING_SERVICE_CORE_POOL_SIZE, serviceTimeout);
+    this.deletedBlockLog = deletedBlockLog;
+    this.mappingService = mapper;
+    this.nodeManager = nodeManager;
+
+    int containerLimit = conf.getInt(
+        OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL,
+        OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL_DEFAULT);
+    Preconditions.checkArgument(containerLimit > 0,
+        "Container limit size should be " + "positive.");
+    // Use container limit value multiply by a factor 2 to ensure DN
+    // not wait for orders.
+    this.blockDeleteLimitSize = containerLimit * 2;
+  }
+
+  @Override
+  public BackgroundTaskQueue getTasks() {
+    BackgroundTaskQueue queue = new BackgroundTaskQueue();
+    queue.add(new DeletedBlockTransactionScanner());
+    return queue;
+  }
+
+  private class DeletedBlockTransactionScanner
+      implements BackgroundTask<EmptyTaskResult> {
+
+    @Override
+    public int getPriority() {
+      return 1;
+    }
+
+    @Override
+    public EmptyTaskResult call() throws Exception {
+      int dnTxCount = 0;
+      long startTime = Time.monotonicNow();
+      // Scan the SCM DB at the HB interval and collect a throttled list of
+      // blocks to delete.
+      LOG.debug("Running DeletedBlockTransactionScanner");
+      DatanodeDeletedBlockTransactions transactions = null;
+      List<DatanodeDetails> datanodes = nodeManager.getNodes(NodeState.HEALTHY);
+      if (datanodes != null) {
+        transactions = new DatanodeDeletedBlockTransactions(mappingService,
+            blockDeleteLimitSize, datanodes.size());
+        try {
+          deletedBlockLog.getTransactions(transactions);
+        } catch (IOException e) {
+          // We may tolerate a number of failures for some time,
+          // but if it continues to fail, at some point we need to raise
+          // an exception and probably fail the SCM? At present, it simply
+          // continues to retry the scanning.
+          LOG.error("Failed to get block deletion transactions from delTX log",
+              e);
+        }
+        LOG.debug("Scanned deleted blocks log and got {} delTX to process.",
+            transactions.getTXNum());
+      }
+
+      if (transactions != null && !transactions.isEmpty()) {
+        for (UUID dnId : transactions.getDatanodeIDs()) {
+          List<DeletedBlocksTransaction> dnTXs = transactions
+              .getDatanodeTransactions(dnId);
+          if (dnTXs != null && !dnTXs.isEmpty()) {
+            dnTxCount += dnTXs.size();
+            // TODO commandQueue needs a cap.
+            // We should stop caching new commands if the number of
+            // un-processed commands exceeds a limit, e.g. 50. Otherwise, if a
+            // datanode goes offline for some time, the cached commands could
+            // flood the queue.
+            nodeManager.addDatanodeCommand(dnId,
+                new DeleteBlocksCommand(dnTXs));
+            LOG.debug(
+                "Added delete block command for datanode {} in the queue,"
+                    + " number of delete block transactions: {}, TxID list: {}",
+                dnId, dnTXs.size(), String.join(",",
+                    transactions.getTransactionIDList(dnId)));
+          }
+        }
+      }
+
+      if (dnTxCount > 0) {
+        LOG.info(
+            "Totally added {} delete blocks command for"
+                + " {} datanodes, task elapsed time: {}ms",
+            dnTxCount, transactions.getDatanodeIDs().size(),
+            Time.monotonicNow() - startTime);
+      }
+
+      return EmptyTaskResult.newResult();
+    }
+  }
+
+  @VisibleForTesting
+  public void setBlockDeleteTXNum(int numTXs) {
+    blockDeleteLimitSize = numTXs;
+  }
+}
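As an illustrative configuration sketch, the scan interval and the per-interval container limit that feed the throttle above could be tuned as follows; the 60-second and 10-container values, and the use of OzoneConfiguration from elsewhere in the tree, are assumptions rather than defaults defined by this patch:

    Configuration conf = new OzoneConfiguration();
    // Poll the deleted-block log every 60 seconds.
    conf.setTimeDuration(OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
        60, TimeUnit.SECONDS);
    // Per-interval container limit; SCMBlockDeletingService doubles this to
    // derive the per-datanode TX throttle.
    conf.setInt(
        OzoneConfigKeys.OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);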
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java
new file mode 100644
index 0000000..e1bfdff
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.block;
+/**
+ * This package contains routines to manage the block location and
+ * mapping inside SCM.
+ */
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
new file mode 100644
index 0000000..63cb3a3
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
@@ -0,0 +1,607 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * <p>http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * <p>Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.closer.ContainerCloser;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.replication.ContainerSupervisor;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.lease.Lease;
+import org.apache.hadoop.ozone.lease.LeaseException;
+import org.apache.hadoop.ozone.lease.LeaseManager;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_GB;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_CHANGE_CONTAINER_STATE;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB;
+
+/**
+ * Mapping class that maintains the mapping from a container name to its
+ * pipeline. It is used by SCM when allocating new locations and when
+ * looking up a key.
+ */
+public class ContainerMapping implements Mapping {
+  private static final Logger LOG = LoggerFactory.getLogger(ContainerMapping
+      .class);
+
+  private final NodeManager nodeManager;
+  private final long cacheSize;
+  private final Lock lock;
+  private final Charset encoding = Charset.forName("UTF-8");
+  private final MetadataStore containerStore;
+  private final PipelineSelector pipelineSelector;
+  private final ContainerStateManager containerStateManager;
+  private final LeaseManager<ContainerInfo> containerLeaseManager;
+  private final ContainerSupervisor containerSupervisor;
+  private final float containerCloseThreshold;
+  private final ContainerCloser closer;
+  private final long size;
+
+  /**
+   * Constructs a mapping class that creates mapping between container names
+   * and pipelines.
+   *
+   * @param nodeManager - NodeManager so that we can get the nodes that are
+   * healthy enough to place new containers on.
+   * @param cacheSizeMB - Amount of memory reserved for the LSM tree to cache
+   * its nodes. This is passed to LevelDB, and the memory is allocated in
+   * native code space. CacheSize is specified in MB.
+   * @throws IOException on Failure.
+   */
+  @SuppressWarnings("unchecked")
+  public ContainerMapping(
+      final Configuration conf, final NodeManager nodeManager, final int
+      cacheSizeMB) throws IOException {
+    this.nodeManager = nodeManager;
+    this.cacheSize = cacheSizeMB;
+    this.closer = new ContainerCloser(nodeManager, conf);
+
+    File metaDir = getOzoneMetaDirPath(conf);
+
+    // Write the container name to pipeline mapping.
+    File containerDBPath = new File(metaDir, SCM_CONTAINER_DB);
+    containerStore =
+        MetadataStoreBuilder.newBuilder()
+            .setConf(conf)
+            .setDbFile(containerDBPath)
+            .setCacheSize(this.cacheSize * OzoneConsts.MB)
+            .build();
+
+    this.lock = new ReentrantLock();
+
+    this.pipelineSelector = new PipelineSelector(nodeManager, conf);
+
+    // To be replaced with code getStorageSize once it is committed.
+    size = conf.getLong(OZONE_SCM_CONTAINER_SIZE_GB,
+        OZONE_SCM_CONTAINER_SIZE_DEFAULT) * 1024 * 1024 * 1024;
+    this.containerStateManager =
+        new ContainerStateManager(conf, this);
+    this.containerSupervisor =
+        new ContainerSupervisor(conf, nodeManager,
+            nodeManager.getNodePoolManager());
+    this.containerCloseThreshold = conf.getFloat(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_CLOSE_THRESHOLD,
+        ScmConfigKeys.OZONE_SCM_CONTAINER_CLOSE_THRESHOLD_DEFAULT);
+    LOG.trace("Container State Manager created.");
+
+    long containerCreationLeaseTimeout = conf.getTimeDuration(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_CREATION_LEASE_TIMEOUT,
+        ScmConfigKeys.OZONE_SCM_CONTAINER_CREATION_LEASE_TIMEOUT_DEFAULT,
+        TimeUnit.MILLISECONDS);
+    LOG.trace("Starting Container Lease Manager.");
+    containerLeaseManager = new LeaseManager<>(containerCreationLeaseTimeout);
+    containerLeaseManager.start();
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public ContainerInfo getContainer(final String containerName) throws
+      IOException {
+    ContainerInfo containerInfo;
+    lock.lock();
+    try {
+      byte[] containerBytes = containerStore.get(containerName.getBytes(
+          encoding));
+      if (containerBytes == null) {
+        throw new SCMException(
+            "Specified key does not exist. key : " + containerName,
+            SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER);
+      }
+
+      HddsProtos.SCMContainerInfo temp = HddsProtos.SCMContainerInfo.PARSER
+          .parseFrom(containerBytes);
+      containerInfo = ContainerInfo.fromProtobuf(temp);
+      return containerInfo;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<ContainerInfo> listContainer(String startName,
+      String prefixName, int count) throws IOException {
+    List<ContainerInfo> containerList = new ArrayList<>();
+    lock.lock();
+    try {
+      if (containerStore.isEmpty()) {
+        throw new IOException("No container exists in current db");
+      }
+      MetadataKeyFilter prefixFilter = new KeyPrefixFilter(prefixName);
+      byte[] startKey = startName == null ? null : DFSUtil.string2Bytes(
+          startName);
+      List<Map.Entry<byte[], byte[]>> range =
+          containerStore.getSequentialRangeKVs(startKey, count, prefixFilter);
+
+      // Transform the values into the pipelines.
+      // TODO: filter by container state
+      for (Map.Entry<byte[], byte[]> entry : range) {
+        ContainerInfo containerInfo =
+            ContainerInfo.fromProtobuf(
+                HddsProtos.SCMContainerInfo.PARSER.parseFrom(
+                    entry.getValue()));
+        Preconditions.checkNotNull(containerInfo);
+        containerList.add(containerInfo);
+      }
+    } finally {
+      lock.unlock();
+    }
+    return containerList;
+  }
+
+  /**
+   * Allocates a new container.
+   *
+   * @param type - Replication type of the container.
+   * @param replicationFactor - Replication factor of the container.
+   * @param containerName - Name of the container.
+   * @param owner - The string name of the Service that owns this container.
+   * @return - ContainerInfo for the newly allocated container.
+   * @throws IOException - Exception
+   */
+  @Override
+  public ContainerInfo allocateContainer(
+      ReplicationType type,
+      ReplicationFactor replicationFactor,
+      final String containerName,
+      String owner)
+      throws IOException {
+    Preconditions.checkNotNull(containerName);
+    Preconditions.checkState(!containerName.isEmpty());
+
+    ContainerInfo containerInfo;
+    if (!nodeManager.isOutOfChillMode()) {
+      throw new SCMException(
+          "Unable to create container while in chill mode",
+          SCMException.ResultCodes.CHILL_MODE_EXCEPTION);
+    }
+
+    lock.lock();
+    try {
+      byte[] containerBytes = containerStore.get(containerName.getBytes(
+          encoding));
+      if (containerBytes != null) {
+        throw new SCMException(
+            "Specified container already exists. key : " + containerName,
+            SCMException.ResultCodes.CONTAINER_EXISTS);
+      }
+      containerInfo =
+          containerStateManager.allocateContainer(
+              pipelineSelector, type, replicationFactor, containerName,
+              owner);
+      containerStore.put(
+          containerName.getBytes(encoding), containerInfo.getProtobuf()
+              .toByteArray());
+    } finally {
+      lock.unlock();
+    }
+    return containerInfo;
+  }
+
+  /**
+   * Deletes a container from SCM.
+   *
+   * @param containerName - Container name
+   * @throws IOException if the container doesn't exist or the container store
+   *                     failed to delete the specified key.
+   */
+  @Override
+  public void deleteContainer(String containerName) throws IOException {
+    lock.lock();
+    try {
+      byte[] dbKey = containerName.getBytes(encoding);
+      byte[] containerBytes = containerStore.get(dbKey);
+      if (containerBytes == null) {
+        throw new SCMException(
+            "Failed to delete container " + containerName + ", reason : " +
+                "container doesn't exist.",
+            SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER);
+      }
+      containerStore.delete(dbKey);
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc} Used by the client to update the container state on SCM.
+   */
+  @Override
+  public HddsProtos.LifeCycleState updateContainerState(
+      String containerName, HddsProtos.LifeCycleEvent event) throws
+      IOException {
+    ContainerInfo containerInfo;
+    lock.lock();
+    try {
+      byte[] dbKey = containerName.getBytes(encoding);
+      byte[] containerBytes = containerStore.get(dbKey);
+      if (containerBytes == null) {
+        throw new SCMException(
+            "Failed to update container state"
+                + containerName
+                + ", reason : container doesn't exist.",
+            SCMException.ResultCodes.FAILED_TO_FIND_CONTAINER);
+      }
+      containerInfo =
+          ContainerInfo.fromProtobuf(HddsProtos.SCMContainerInfo.PARSER
+              .parseFrom(containerBytes));
+
+      Preconditions.checkNotNull(containerInfo);
+      switch (event) {
+      case CREATE:
+        // Acquire lease on container
+        Lease<ContainerInfo> containerLease =
+            containerLeaseManager.acquire(containerInfo);
+        // Register callback to be executed in case of timeout
+        containerLease.registerCallBack(() -> {
+          updateContainerState(containerName,
+              HddsProtos.LifeCycleEvent.TIMEOUT);
+          return null;
+        });
+        break;
+      case CREATED:
+        // Release the lease on container
+        containerLeaseManager.release(containerInfo);
+        break;
+      case FINALIZE:
+        // TODO: we don't need a lease manager here for closing, as the
+        // container report will include the container state after HDFS-13008.
+        // If a client fails to update the container close state, the container
+        // reports from the 3 DNs will be used to close the container
+        // eventually.
+        break;
+      case CLOSE:
+        break;
+      case UPDATE:
+        break;
+      case DELETE:
+        break;
+      case TIMEOUT:
+        break;
+      case CLEANUP:
+        break;
+      default:
+        throw new SCMException("Unsupported container LifeCycleEvent.",
+            FAILED_TO_CHANGE_CONTAINER_STATE);
+      }
+      // If the below updateContainerState call fails, we should revert the
+      // changes made in the switch case above,
+      // for example releasing the lease acquired in the CREATE case.
+      ContainerInfo updatedContainer = containerStateManager
+          .updateContainerState(containerInfo, event);
+      containerStore.put(dbKey, updatedContainer.getProtobuf().toByteArray());
+      return updatedContainer.getState();
+    } catch (LeaseException e) {
+      throw new IOException("Lease Exception.", e);
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Returns the container State Manager.
+   *
+   * @return ContainerStateManager
+   */
+  @Override
+  public ContainerStateManager getStateManager() {
+    return containerStateManager;
+  }
+
+  /**
+   * Process container report from Datanode.
+   * <p>
+   * Processing follows a very simple logic for the time being.
+   * <p>
+   * 1. Datanodes report the current state -- denoted by datanodeState.
+   * <p>
+   * 2. We have the older SCM state from the database -- denoted by
+   * knownState.
+   * <p>
+   * 3. We copy the usage etc. from datanodeState to newState and log that
+   * newState to the DB. This allows SCM to boot up again, read the
+   * state of the world from the DB, and then reconcile the state from
+   * container reports when they arrive.
+   *
+   * @param reports Container report
+   */
+  @Override
+  public void processContainerReports(ContainerReportsRequestProto reports)
+      throws IOException {
+    List<StorageContainerDatanodeProtocolProtos.ContainerInfo>
+        containerInfos = reports.getReportsList();
+    containerSupervisor.handleContainerReport(reports);
+    for (StorageContainerDatanodeProtocolProtos.ContainerInfo datanodeState :
+        containerInfos) {
+      byte[] dbKey = datanodeState.getContainerNameBytes().toByteArray();
+      lock.lock();
+      try {
+        byte[] containerBytes = containerStore.get(dbKey);
+        if (containerBytes != null) {
+          HddsProtos.SCMContainerInfo knownState =
+              HddsProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes);
+
+          HddsProtos.SCMContainerInfo newState =
+              reconcileState(datanodeState, knownState);
+
+          // FIX ME: This can be optimized; we write twice where a single
+          // write would do.
+          //
+          // We need to write this to the DB again since the close path only
+          // writes the updated state.
+          containerStore.put(dbKey, newState.toByteArray());
+
+          // If the container is closed, then its state is already written to
+          // the SCM DB. TODO: see if we can write only once to the DB.
+          if (closeContainerIfNeeded(newState)) {
+            LOG.info("Closing the Container: {}", newState.getContainerName());
+          }
+        } else {
+          // Container not found in our container db.
+          LOG.error("Error while processing container report from datanode:" +
+                  " {}, for container: {}, reason: container doesn't exist " +
+                  "in container database.", reports.getDatanodeDetails(),
+              datanodeState.getContainerName());
+        }
+      } finally {
+        lock.unlock();
+      }
+    }
+  }
+
+  /**
+   * Reconciles the state from Datanode with the state in SCM.
+   *
+   * @param datanodeState - State from the Datanode.
+   * @param knownState - State inside SCM.
+   * @return new SCM State for this container.
+   */
+  private HddsProtos.SCMContainerInfo reconcileState(
+      StorageContainerDatanodeProtocolProtos.ContainerInfo datanodeState,
+      HddsProtos.SCMContainerInfo knownState) {
+    HddsProtos.SCMContainerInfo.Builder builder =
+        HddsProtos.SCMContainerInfo.newBuilder();
+    builder.setContainerName(knownState.getContainerName());
+    builder.setPipeline(knownState.getPipeline());
+    // If used size is greater than allocated size, we will be updating
+    // allocated size with used size. This update is done as a fallback
+    // mechanism in case SCM crashes without properly updating allocated
+    // size. Correct allocated value will be updated by
+    // ContainerStateManager during SCM shutdown.
+    long usedSize = datanodeState.getUsed();
+    long allocated = knownState.getAllocatedBytes() > usedSize ?
+        knownState.getAllocatedBytes() : usedSize;
+    builder.setAllocatedBytes(allocated);
+    builder.setUsedBytes(usedSize);
+    builder.setNumberOfKeys(datanodeState.getKeyCount());
+    builder.setState(knownState.getState());
+    builder.setStateEnterTime(knownState.getStateEnterTime());
+    builder.setContainerID(knownState.getContainerID());
+    if (knownState.getOwner() != null) {
+      builder.setOwner(knownState.getOwner());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Queues the close container command to the datanodes and writes the new
+   * state to the container DB.
+   * <p>
+   * TODO : Remove these two ContainerInfo definitions. It is confusing to have
+   * one protobuf in one file and another definition in another file.
+   *
+   * @param newState - This is the state we maintain in SCM.
+   * @throws IOException
+   */
+  private boolean closeContainerIfNeeded(HddsProtos.SCMContainerInfo newState)
+      throws IOException {
+    float containerUsedPercentage = 1.0f *
+        newState.getUsedBytes() / this.size;
+
+    ContainerInfo scmInfo = getContainer(newState.getContainerName());
+    if (containerUsedPercentage >= containerCloseThreshold
+        && !isClosed(scmInfo)) {
+      // We will call the closer till we get to the closed state.
+      // That is, SCM will make this call repeatedly until we reach the
+      // closed state.
+      closer.close(newState);
+
+      if (shouldClose(scmInfo)) {
+        // This event moves the Container from Open to Closing State, this is
+        // a state inside SCM. This is the desired state that SCM wants this
+        // container to reach. We will know that a container has reached the
+        // closed state from container reports. This state change should be
+        // invoked once and only once.
+        HddsProtos.LifeCycleState state = updateContainerState(
+            scmInfo.getContainerName(),
+            HddsProtos.LifeCycleEvent.FINALIZE);
+        if (state != HddsProtos.LifeCycleState.CLOSING) {
+          LOG.error("Failed to close container {}, reason : Not able to " +
+                  "update container state, current container state: {}.",
+              newState.getContainerName(), state);
+          return false;
+        }
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Checks whether the container should be moved towards the closed state,
+   * that is, whether it is still in the OPEN state and not already in the
+   * Closing, Closed, Deleting or Deleted state.
+   *
+   * @param info - ContainerInfo.
+   * @return true if the container is in the OPEN state, false otherwise.
+   */
+  private boolean shouldClose(ContainerInfo info) {
+    return info.getState() == HddsProtos.LifeCycleState.OPEN;
+  }
+
+  private boolean isClosed(ContainerInfo info) {
+    return info.getState() == HddsProtos.LifeCycleState.CLOSED;
+  }
+
+  @VisibleForTesting
+  public ContainerCloser getCloser() {
+    return closer;
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated with it.
+   * If the stream is already closed then invoking this method has no effect.
+   * <p>
+   * As noted in {@link AutoCloseable#close()}, cases where the close may fail
+   * require careful attention. It is strongly advised to relinquish the
+   * underlying resources and to internally <em>mark</em> the {@code Closeable}
+   * as closed, prior to throwing the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    if (containerLeaseManager != null) {
+      containerLeaseManager.shutdown();
+    }
+    if (containerStateManager != null) {
+      flushContainerInfo();
+      containerStateManager.close();
+    }
+    if (containerStore != null) {
+      containerStore.close();
+    }
+  }
+
+  /**
+   * Since the allocatedBytes of a container is kept only in memory (in
+   * containerStateManager), we need to write it to the container store when
+   * closing ContainerMapping.
+   *
+   * @throws IOException on failure.
+   */
+  @VisibleForTesting
+  public void flushContainerInfo() throws IOException {
+    List<ContainerInfo> containers = containerStateManager.getAllContainers();
+    List<String> failedContainers = new ArrayList<>();
+    for (ContainerInfo info : containers) {
+      // Even if updating some containers fails, the others can still proceed.
+      try {
+        byte[] dbKey = info.getContainerName().getBytes(encoding);
+        byte[] containerBytes = containerStore.get(dbKey);
+        // TODO : It looks like when a container is deleted, the container is
+        // removed from containerStore but not from containerStateManager, so
+        // it can return info of a deleted container. We may revisit this in
+        // the future; for now, just skip a not-found container.
+        if (containerBytes != null) {
+          HddsProtos.SCMContainerInfo oldInfoProto =
+              HddsProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes);
+          ContainerInfo oldInfo = ContainerInfo.fromProtobuf(oldInfoProto);
+          ContainerInfo newInfo = new ContainerInfo.Builder()
+              .setAllocatedBytes(info.getAllocatedBytes())
+              .setContainerName(oldInfo.getContainerName())
+              .setNumberOfKeys(oldInfo.getNumberOfKeys())
+              .setOwner(oldInfo.getOwner())
+              .setPipeline(oldInfo.getPipeline())
+              .setState(oldInfo.getState())
+              .setUsedBytes(oldInfo.getUsedBytes())
+              .build();
+          containerStore.put(dbKey, newInfo.getProtobuf().toByteArray());
+        } else {
+          LOG.debug("Container state manager has container {} but not found " +
+                  "in container store, a deleted container?",
+              info.getContainerName());
+        }
+      } catch (IOException ioe) {
+        failedContainers.add(info.getContainerName());
+      }
+    }
+    if (!failedContainers.isEmpty()) {
+      throw new IOException("Error in flushing container info from container " +
+          "state manager: " + failedContainers);
+    }
+  }
+
+  @VisibleForTesting
+  public MetadataStore getContainerStore() {
+    return containerStore;
+  }
+}
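The create lifecycle implemented above -- allocate, then CREATE (lease acquired), then CREATED (lease released), with TIMEOUT fired by the lease callback if the client never acknowledges -- can be easier to follow from the caller's side. Below is a minimal usage sketch against the Mapping API introduced in this patch; the replication settings and the sketch class itself are illustrative assumptions, not part of the patch.

import java.io.IOException;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.Mapping;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;

public final class ContainerCreateFlowSketch {
  private ContainerCreateFlowSketch() { }

  static ContainerInfo createContainer(Mapping mapping, String name,
      String owner) throws IOException {
    // 1. SCM picks a pipeline and records the container as ALLOCATED.
    ContainerInfo info = mapping.allocateContainer(
        HddsProtos.ReplicationType.STAND_ALONE,
        HddsProtos.ReplicationFactor.ONE, name, owner);

    // 2. The client tells SCM it saw the allocation; a creation lease is
    //    acquired and the container moves from ALLOCATED to CREATING.
    mapping.updateContainerState(name, HddsProtos.LifeCycleEvent.CREATE);

    // ... the client would now create the container on the datanodes in
    //     info.getPipeline() ...

    // 3. The client acknowledges completion; the lease is released and the
    //    container moves from CREATING to OPEN. If this call never arrives,
    //    the lease times out and SCM fires the TIMEOUT event instead.
    mapping.updateContainerState(name, HddsProtos.LifeCycleEvent.CREATED);
    return info;
  }
}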
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
new file mode 100644
index 0000000..227eca0
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
@@ -0,0 +1,456 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.states.ContainerState;
+import org.apache.hadoop.hdds.scm.container.states.ContainerStateMap;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.common.statemachine
+    .InvalidStateTransitionException;
+import org.apache.hadoop.ozone.common.statemachine.StateMachine;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.NavigableSet;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_CHANGE_CONTAINER_STATE;
+
+/**
+ * A container state manager keeps track of container states and returns
+ * containers that match various queries.
+ * <p>
+ * This state machine is driven by a combination of server and client actions.
+ * <p>
+ * This is how a container create happens: 1. When a container is created, the
+ * server (or SCM) marks that container as being in the ALLOCATED state. In
+ * this state, SCM has chosen a pipeline for the container to live on. However,
+ * the container is not created yet. This container, along with the pipeline,
+ * is returned to the client.
+ * <p>
+ * 2. When the client sees the container state as ALLOCATED, it understands
+ * that the container needs to be created on the specified pipeline. The client
+ * lets SCM know that it saw this flag and is initiating the creation on the
+ * data nodes.
+ * <p>
+ * This is done by sending the CREATE event for that container name (see
+ * updateContainerState). When SCM gets this call, it puts the container state
+ * into CREATING. All this state means is that SCM told the client to create a
+ * container and the client saw that request.
+ * <p>
+ * 3. Then client makes calls to datanodes directly, asking the datanodes to
+ * create the container. This is done with the help of pipeline that supports
+ * this container.
+ * <p>
+ * 4. Once the creation of the container is complete, the client will make
+ * another call to SCM, this time specifying the container name and CREATED as
+ * the event.
+ * <p>
+ * 5. With the CREATED event, the container moves to the OPEN state. This is
+ * the state in which clients can write to a container.
+ * <p>
+ * 6. If the client does not respond with the CREATED event within a certain
+ * time, the state machine times out and triggers a delete operation on the
+ * container.
+ * <p>
+ * Please see the function initializeStateMachine below to see how this looks in
+ * code.
+ * <p>
+ * Reusing existing container :
+ * <p>
+ * The create container call is not made all the time; the system tries to use
+ * open containers as much as possible. In those cases, it looks through the
+ * list of open containers and returns containers that match the specific
+ * signature.
+ * <p>
+ * Please note : Logically there are 3 separate state machines in the case of
+ * containers.
+ * <p>
+ * The Create State Machine -- Commented extensively above.
+ * <p>
+ * Open/Close State Machine - Once the container is in the Open State,
+ * eventually it will be closed, once sufficient data has been written to it.
+ * <p>
+ * TimeOut Delete Container State Machine - if the container creation times
+ * out, then the Container State Manager decides to delete the container.
+ */
+public class ContainerStateManager implements Closeable {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerStateManager.class);
+
+  private final StateMachine<HddsProtos.LifeCycleState,
+      HddsProtos.LifeCycleEvent> stateMachine;
+
+  private final long containerSize;
+  private final ConcurrentHashMap<ContainerState, ContainerID> lastUsedMap;
+  private final ContainerStateMap containers;
+  private final AtomicLong containerCount;
+
+  /**
+   * Constructs a Container State Manager that tracks all containers owned by
+   * SCM for the purpose of allocation of blocks.
+   * <p>
+   * TODO : Add Container Tags so we know which containers are owned by SCM.
+   */
+  @SuppressWarnings("unchecked")
+  public ContainerStateManager(Configuration configuration,
+      Mapping containerMapping) {
+
+    // Initialize the container state machine.
+    Set<HddsProtos.LifeCycleState> finalStates = new HashSet<>();
+
+    // These are the steady states of a container.
+    finalStates.add(LifeCycleState.OPEN);
+    finalStates.add(LifeCycleState.CLOSED);
+    finalStates.add(LifeCycleState.DELETED);
+
+    this.stateMachine = new StateMachine<>(LifeCycleState.ALLOCATED,
+        finalStates);
+    initializeStateMachine();
+
+    this.containerSize = OzoneConsts.GB * configuration.getInt(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB,
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT);
+
+    lastUsedMap = new ConcurrentHashMap<>();
+    containerCount = new AtomicLong(0);
+    containers = new ContainerStateMap();
+    loadExistingContainers(containerMapping);
+  }
+
+  private void loadExistingContainers(Mapping containerMapping) {
+
+    List<ContainerInfo> containerList;
+    try {
+      containerList = containerMapping.listContainer(null,
+          null, Integer.MAX_VALUE);
+
+      // If there are no containers to load, let us return.
+      if (containerList == null || containerList.size() == 0) {
+        LOG.info("No containers to load for this cluster.");
+        return;
+      }
+    } catch (IOException e) {
+      if (!e.getMessage().equals("No container exists in current db")) {
+        LOG.error("Could not list the containers", e);
+      }
+      return;
+    }
+
+    try {
+      long maxID = 0;
+      for (ContainerInfo container : containerList) {
+        containers.addContainer(container);
+
+        if (maxID < container.getContainerID()) {
+          maxID = container.getContainerID();
+        }
+
+        containerCount.set(maxID);
+      }
+    } catch (SCMException ex) {
+      LOG.error("Unable to create a container information. ", ex);
+      // Fix me, what is the proper shutdown procedure for SCM ??
+      // System.exit(1) // Should we exit here?
+    }
+  }
+
+  /**
+   * Return the info of all the containers kept by the in-memory mapping.
+   *
+   * @return the list of all container info.
+   */
+  public List<ContainerInfo> getAllContainers() {
+    List<ContainerInfo> list = new ArrayList<>();
+
+    //No Locking needed since the return value is an immutable map.
+    containers.getContainerMap().forEach((key, value) -> list.add(value));
+    return list;
+  }
+
+  /*
+   *
+   * Event and State Transition Mapping:
+   *
+   * State: ALLOCATED ---------------> CREATING
+   * Event:                CREATE
+   *
+   * State: CREATING  ---------------> OPEN
+   * Event:               CREATED
+   *
+   * State: OPEN      ---------------> CLOSING
+   * Event:               FINALIZE
+   *
+   * State: CLOSING   ---------------> CLOSED
+   * Event:                CLOSE
+   *
+   * State: CLOSED   ----------------> DELETING
+   * Event:                DELETE
+   *
+   * State: DELETING ----------------> DELETED
+   * Event:               CLEANUP
+   *
+   * State: CREATING  ---------------> DELETING
+   * Event:               TIMEOUT
+   *
+   *
+   * Container State Flow:
+   *
+   * [ALLOCATED]------->[CREATING]--------->[OPEN]---------->[CLOSING]------->[CLOSED]
+   *            (CREATE)     |    (CREATED)       (FINALIZE)          (CLOSE)    |
+   *                         |                                                   |
+   *                         |                                                   |
+   *                         |(TIMEOUT)                                  (DELETE)|
+   *                         |                                                   |
+   *                         +------------------> [DELETING] <-------------------+
+   *                                                   |
+   *                                                   |
+   *                                          (CLEANUP)|
+   *                                                   |
+   *                                               [DELETED]
+   */
+  private void initializeStateMachine() {
+    stateMachine.addTransition(LifeCycleState.ALLOCATED,
+        LifeCycleState.CREATING,
+        LifeCycleEvent.CREATE);
+
+    stateMachine.addTransition(LifeCycleState.CREATING,
+        LifeCycleState.OPEN,
+        LifeCycleEvent.CREATED);
+
+    stateMachine.addTransition(LifeCycleState.OPEN,
+        LifeCycleState.CLOSING,
+        LifeCycleEvent.FINALIZE);
+
+    stateMachine.addTransition(LifeCycleState.CLOSING,
+        LifeCycleState.CLOSED,
+        LifeCycleEvent.CLOSE);
+
+    stateMachine.addTransition(LifeCycleState.CLOSED,
+        LifeCycleState.DELETING,
+        LifeCycleEvent.DELETE);
+
+    stateMachine.addTransition(LifeCycleState.CREATING,
+        LifeCycleState.DELETING,
+        LifeCycleEvent.TIMEOUT);
+
+    stateMachine.addTransition(LifeCycleState.DELETING,
+        LifeCycleState.DELETED,
+        LifeCycleEvent.CLEANUP);
+  }
+
+  /**
+   * Allocates a new container based on the type, replication factor etc.
+   *
+   * @param selector - Pipeline selector class.
+   * @param type - Replication type.
+   * @param replicationFactor - Replication factor.
+   * @param containerName - Container name.
+   * @param owner - Owner of the container.
+   * @return Container Info.
+   * @throws IOException on failure.
+   */
+  public ContainerInfo allocateContainer(PipelineSelector selector, HddsProtos
+      .ReplicationType type, HddsProtos.ReplicationFactor replicationFactor,
+      final String containerName, String owner) throws
+      IOException {
+
+    Pipeline pipeline = selector.getReplicationPipeline(type,
+        replicationFactor, containerName);
+
+    Preconditions.checkNotNull(pipeline, "Pipeline type=%s/"
+        + "replication=%s couldn't be found for the new container. "
+        + "Do you have enough nodes?", type, replicationFactor);
+
+    ContainerInfo containerInfo = new ContainerInfo.Builder()
+        .setContainerName(containerName)
+        .setState(HddsProtos.LifeCycleState.ALLOCATED)
+        .setPipeline(pipeline)
+        // This is bytes allocated for blocks inside container, not the
+        // container size
+        .setAllocatedBytes(0)
+        .setUsedBytes(0)
+        .setNumberOfKeys(0)
+        .setStateEnterTime(Time.monotonicNow())
+        .setOwner(owner)
+        .setContainerID(containerCount.incrementAndGet())
+        .build();
+    Preconditions.checkNotNull(containerInfo);
+    containers.addContainer(containerInfo);
+    LOG.trace("New container allocated: {}", containerInfo);
+    return containerInfo;
+  }
+
+  /**
+   * Update the Container State to the next state.
+   *
+   * @param info - ContainerInfo
+   * @param event - LifeCycle Event
+   * @return Updated ContainerInfo.
+   * @throws SCMException  on Failure.
+   */
+  public ContainerInfo updateContainerState(ContainerInfo
+      info, HddsProtos.LifeCycleEvent event) throws SCMException {
+    LifeCycleState newState;
+    try {
+      newState = this.stateMachine.getNextState(info.getState(), event);
+    } catch (InvalidStateTransitionException ex) {
+      String error = String.format("Failed to update container state %s, " +
+              "reason: invalid state transition from state: %s upon " +
+              "event: %s.",
+          info.getPipeline().getContainerName(), info.getState(), event);
+      LOG.error(error);
+      throw new SCMException(error, FAILED_TO_CHANGE_CONTAINER_STATE);
+    }
+
+    // This is a post condition after executing getNextState.
+    Preconditions.checkNotNull(newState);
+    containers.updateState(info, info.getState(), newState);
+    return containers.getContainerInfo(info);
+  }
+
+  /**
+   * Update the information of an existing container.
+   * @param info - Container Info
+   * @return  ContainerInfo
+   * @throws SCMException - on Error.
+   */
+  public ContainerInfo updateContainerInfo(ContainerInfo info)
+      throws SCMException {
+    containers.updateContainerInfo(info);
+    return containers.getContainerInfo(info);
+  }
+
+
+  /**
+   * Return a container matching the attributes specified.
+   *
+   * @param size - Space needed in the Container.
+   * @param owner - Owner of the container - A specific nameservice.
+   * @param type - Replication Type {StandAlone, Ratis}
+   * @param factor - Replication Factor {ONE, THREE}
+   * @param state - State of the Container-- {Open, Allocated etc.}
+   * @return ContainerInfo, null if there is no match found.
+   */
+  public ContainerInfo getMatchingContainer(final long size,
+      String owner, ReplicationType type, ReplicationFactor factor,
+      LifeCycleState state) {
+
+    // Find containers that match the query spec, if no match return null.
+    NavigableSet<ContainerID> matchingSet =
+        containers.getMatchingContainerIDs(state, owner, factor, type);
+    if (matchingSet == null || matchingSet.size() == 0) {
+      return null;
+    }
+
+    // Get the last used container and find container above the last used
+    // container ID.
+    ContainerState key = new ContainerState(owner, type, factor);
+    ContainerID lastID = lastUsedMap.get(key);
+    if(lastID == null) {
+      lastID = matchingSet.first();
+    }
+
+    // There is a small issue here. The first time, we will skip the first
+    // container. But in most cases it will not matter.
+    NavigableSet<ContainerID> resultSet = matchingSet.tailSet(lastID, false);
+    if (resultSet.size() == 0) {
+      resultSet = matchingSet;
+    }
+
+    ContainerInfo selectedContainer =
+        findContainerWithSpace(size, resultSet, owner);
+    if (selectedContainer == null) {
+
+      // If we did not find any space in the tailSet, we need to look for
+      // space in the headSet. We need to pass true to deal with the
+      // situation where we have a lone container that has space: we ignored
+      // the last used container under the assumption that we can find other
+      // containers with space, but if there is only a single container that
+      // is not true. Hence we need to include the last used container as the
+      // last element in the sorted set.
+
+      resultSet = matchingSet.headSet(lastID, true);
+      selectedContainer = findContainerWithSpace(size, resultSet, owner);
+    }
+    // Update the allocated Bytes on this container.
+    if(selectedContainer != null) {
+      selectedContainer.updateAllocatedBytes(size);
+    }
+    return selectedContainer;
+
+  }
+
+  private ContainerInfo findContainerWithSpace(long size,
+      NavigableSet<ContainerID> searchSet, String owner) {
+    // Get the container with space to meet our request.
+    for (ContainerID id : searchSet) {
+      ContainerInfo containerInfo = containers.getContainerInfo(id.getId());
+      if (containerInfo.getAllocatedBytes() + size <= this.containerSize) {
+        containerInfo.updateLastUsedTime();
+
+        ContainerState key = new ContainerState(owner,
+            containerInfo.getPipeline().getType(),
+            containerInfo.getPipeline().getFactor());
+        lastUsedMap.put(key, containerInfo.containerID());
+        return containerInfo;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Returns a set of ContainerIDs that match the given attributes.
+   *
+   * @param owner  Owner of the Containers.
+   * @param type - Replication Type of the containers
+   * @param factor - Replication factor of the containers.
+   * @param state - Current State, like Open, Close etc.
+   * @return Set of containers that match the specific query parameters.
+   */
+  public NavigableSet<ContainerID> getMatchingContainerIDs(
+      String owner, ReplicationType type, ReplicationFactor factor,
+      LifeCycleState state) {
+    return containers.getMatchingContainerIDs(state, owner,
+        factor, type);
+  }
+
+  @Override
+  public void close() throws IOException {
+  }
+
+}
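The transition table built by initializeStateMachine() above is small enough to reason about in isolation. The following is a self-contained sketch of the same lifecycle transitions; the enum and class names are illustrative stand-ins, not the real StateMachine/LifeCycleState types from the patch.

import java.util.EnumMap;
import java.util.Map;

public final class LifecycleSketch {
  enum State { ALLOCATED, CREATING, OPEN, CLOSING, CLOSED, DELETING, DELETED }
  enum Event { CREATE, CREATED, FINALIZE, CLOSE, DELETE, TIMEOUT, CLEANUP }

  private static final Map<State, Map<Event, State>> TRANSITIONS =
      new EnumMap<>(State.class);

  static {
    add(State.ALLOCATED, Event.CREATE, State.CREATING);
    add(State.CREATING, Event.CREATED, State.OPEN);
    add(State.CREATING, Event.TIMEOUT, State.DELETING);
    add(State.OPEN, Event.FINALIZE, State.CLOSING);
    add(State.CLOSING, Event.CLOSE, State.CLOSED);
    add(State.CLOSED, Event.DELETE, State.DELETING);
    add(State.DELETING, Event.CLEANUP, State.DELETED);
  }

  private static void add(State from, Event on, State to) {
    TRANSITIONS.computeIfAbsent(from, s -> new EnumMap<>(Event.class))
        .put(on, to);
  }

  /** Returns the next state, or throws if the event is illegal here. */
  static State next(State current, Event event) {
    Map<Event, State> row = TRANSITIONS.get(current);
    State next = (row == null) ? null : row.get(event);
    if (next == null) {
      throw new IllegalStateException(
          "Illegal transition: " + current + " on " + event);
    }
    return next;
  }

  public static void main(String[] args) {
    State s = State.ALLOCATED;
    s = next(s, Event.CREATE);   // CREATING
    s = next(s, Event.CREATED);  // OPEN
    s = next(s, Event.FINALIZE); // CLOSING
    s = next(s, Event.CLOSE);    // CLOSED
    System.out.println("Final state: " + s);
  }
}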
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java
new file mode 100644
index 0000000..c949c6c
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/Mapping.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Mapping class contains the mapping from a container name to a pipeline. This
+ * is used by SCM when allocating new locations and when looking up a key.
+ */
+public interface Mapping extends Closeable {
+  /**
+   * Returns the ContainerInfo from the container name.
+   *
+   * @param containerName - Name
+   * @return - ContainerInfo such as creation state and the pipeline.
+   * @throws IOException
+   */
+  ContainerInfo getContainer(String containerName) throws IOException;
+
+  /**
+   * Returns containers under certain conditions.
+   * Search container names starting from the start name (exclusive),
+   * and use the prefix name to filter the result. The max
+   * size of the searched range cannot exceed the value of count.
+   *
+   * @param startName start name, if null, start searching at the head.
+   * @param prefixName prefix name, if null, the filter is disabled.
+   * @param count count, if count < 0, the max size is unlimited.
+   *              (Usually the count will be replaced with a very big
+   *              value instead of being unlimited, in case the db is very big.)
+   *
+   * @return a list of containers.
+   * @throws IOException
+   */
+  List<ContainerInfo> listContainer(String startName, String prefixName,
+      int count) throws IOException;
+
+  /**
+   * Allocates a new container for the given name and replication factor.
+   *
+   * @param type - replication type of the container.
+   * @param replicationFactor - replication factor of the container.
+   * @param containerName - Name of the container.
+   * @param owner - Name of the service that owns this container.
+   * @return - Container Info.
+   * @throws IOException
+   */
+  ContainerInfo allocateContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor replicationFactor,
+      String containerName, String owner) throws IOException;
+
+  /**
+   * Deletes a container from SCM.
+   *
+   * @param containerName - Container Name
+   * @throws IOException
+   */
+  void deleteContainer(String containerName) throws IOException;
+
+  /**
+   * Update container state.
+   * @param containerName - Container Name
+   * @param event - container life cycle event
+   * @return - new container state
+   * @throws IOException
+   */
+  HddsProtos.LifeCycleState updateContainerState(String containerName,
+      HddsProtos.LifeCycleEvent event) throws IOException;
+
+  /**
+   * Returns the container State Manager.
+   * @return ContainerStateManager
+   */
+  ContainerStateManager getStateManager();
+
+  /**
+   * Process container report from Datanode.
+   *
+   * @param reports Container report
+   */
+  void processContainerReports(ContainerReportsRequestProto reports)
+      throws IOException;
+
+}
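listContainer is a paged API: startName is the starting point (documented as exclusive), prefixName filters, and count bounds each batch. A hedged sketch of paging through all containers with it follows, assuming the exclusive-start semantics documented in the interface; the batch size and helper class are assumptions for illustration.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hdds.scm.container.Mapping;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;

public final class ListContainersSketch {
  private ListContainersSketch() { }

  static List<ContainerInfo> listAll(Mapping mapping, String prefix)
      throws IOException {
    final int batchSize = 100;            // illustrative page size
    List<ContainerInfo> all = new ArrayList<>();
    String start = null;                  // null means start at the head
    while (true) {
      List<ContainerInfo> batch =
          mapping.listContainer(start, prefix, batchSize);
      if (batch == null || batch.isEmpty()) {
        break;
      }
      all.addAll(batch);
      if (batch.size() < batchSize) {
        break;                            // last page
      }
      // Continue after the last name of this batch (exclusive start).
      start = batch.get(batch.size() - 1).getContainerName();
    }
    return all;
  }
}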
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java
new file mode 100644
index 0000000..b5d4da9
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/ContainerCloser.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container.closer;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_CONTAINER_REPORT_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_CONTAINER_REPORT_INTERVAL_DEFAULT;
+
+/**
+ * A class that manages the closing of containers. This allows the transition
+ * from an open but full container to a closed container, to which no more data
+ * is written.
+ */
+public class ContainerCloser {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerCloser.class);
+  private static final long MULTIPLIER = 3L;
+  private static final int CLEANUP_WATER_MARK = 1000;
+  private final NodeManager nodeManager;
+  private final Map<String, Long> commandIssued;
+  private final Configuration configuration;
+  private final AtomicInteger mapCount;
+  private final long reportInterval;
+  private final AtomicInteger threadRunCount;
+  private final AtomicBoolean isRunning;
+
+  /**
+   * Constructs the ContainerCloser class.
+   *
+   * @param nodeManager - NodeManager
+   * @param conf -   Configuration
+   */
+  public ContainerCloser(NodeManager nodeManager, Configuration conf) {
+    Preconditions.checkNotNull(nodeManager);
+    Preconditions.checkNotNull(conf);
+    this.nodeManager = nodeManager;
+    this.configuration = conf;
+    this.commandIssued = new ConcurrentHashMap<>();
+    this.mapCount = new AtomicInteger(0);
+    this.threadRunCount = new AtomicInteger(0);
+    this.isRunning = new AtomicBoolean(false);
+    this.reportInterval = this.configuration.getTimeDuration(
+        OZONE_CONTAINER_REPORT_INTERVAL,
+        OZONE_CONTAINER_REPORT_INTERVAL_DEFAULT, TimeUnit.SECONDS);
+    Preconditions.checkState(this.reportInterval > 0,
+        "report interval has to be greater than 0");
+  }
+
+  @VisibleForTesting
+  public static int getCleanupWaterMark() {
+    return CLEANUP_WATER_MARK;
+  }
+
+  /**
+   * Sends a Container Close command to the data nodes where this container
+   * lives.
+   *
+   * @param info - ContainerInfo.
+   */
+  public void close(HddsProtos.SCMContainerInfo info) {
+
+    if (commandIssued.containsKey(info.getContainerName())) {
+      // Check if we issued a close command within the last
+      // 3 * reportInterval secs.
+      long commandQueueTime = commandIssued.get(info.getContainerName());
+      long currentTime = TimeUnit.MILLISECONDS.toSeconds(Time.monotonicNow());
+      if (currentTime > commandQueueTime + (MULTIPLIER * reportInterval)) {
+        commandIssued.remove(info.getContainerName());
+        mapCount.decrementAndGet();
+      } else {
+        // Ignore this request, since we just issued a close command. We
+        // should wait instead of sending a command to datanode again.
+        return;
+      }
+    }
+
+    // If we reached here, it means that we have not issued a command to the
+    // datanode in the last (3 times the report interval). We are presuming
+    // that is enough time to close the container. Let us go ahead and queue
+    // a close to all the datanodes that participate in the container.
+    //
+    // Three important things to note here:
+    //
+    // 1. It is ok to send this command multiple times to a datanode. Close
+    // container is an idempotent command, if the container is already closed
+    // then we have no issues.
+    //
+    // 2. The container close command is issued to all datanodes. But
+    // depending on the pipeline type, some of the datanodes might ignore it.
+    //
+    // 3. SCM will see that the container is closed from container reports,
+    // but it is possible that datanodes might get close commands, since
+    // this queue can be emptied by a datanode after a close report is sent
+    // to SCM. In that case too, the datanode will ignore this command.
+
+    HddsProtos.Pipeline pipeline = info.getPipeline();
+    for (HddsProtos.DatanodeDetailsProto datanodeDetails :
+        pipeline.getPipelineChannel().getMembersList()) {
+      nodeManager.addDatanodeCommand(
+          DatanodeDetails.getFromProtoBuf(datanodeDetails).getUuid(),
+          new CloseContainerCommand(info.getContainerName()));
+    }
+    if (!commandIssued.containsKey(info.getContainerName())) {
+      commandIssued.put(info.getContainerName(),
+          TimeUnit.MILLISECONDS.toSeconds(Time.monotonicNow()));
+      mapCount.incrementAndGet();
+    }
+    // run the hash map cleaner thread if needed, non-blocking call.
+    runCleanerThreadIfNeeded();
+  }
+
+  private void runCleanerThreadIfNeeded() {
+    // Let us check if we should run a cleaner thread, not using map.size
+    // since it runs a loop in the case of the concurrentMap.
+    if (mapCount.get() > CLEANUP_WATER_MARK &&
+        isRunning.compareAndSet(false, true)) {
+      Runnable entryCleaner = () -> {
+        LOG.debug("Starting close container Hash map cleaner.");
+        try {
+          for (Map.Entry<String, Long> entry : commandIssued.entrySet()) {
+            long commandQueueTime = entry.getValue();
+            if (commandQueueTime + (MULTIPLIER * reportInterval) <
+                TimeUnit.MILLISECONDS.toSeconds(Time.monotonicNow())) {
+
+              // It is possible for this remove to fail due to race conditions.
+              // No big deal we will cleanup next time.
+              commandIssued.remove(entry.getKey());
+              mapCount.decrementAndGet();
+            }
+          }
+          isRunning.compareAndSet(true, false);
+          LOG.debug("Finished running, close container Hash map cleaner.");
+        } catch (Exception ex) {
+          LOG.error("Unable to finish cleaning the closed containers map.", ex);
+        }
+      };
+
+      // Launch the cleaner thread only when we need it, instead of having a
+      // daemon thread that is sleeping all the time. We need to set Daemon to
+      // true to avoid blocking clean exits.
+      Thread cleanerThread = new ThreadFactoryBuilder()
+          .setDaemon(true)
+          .setNameFormat("Closed Container Cleaner Thread - %d")
+          .build().newThread(entryCleaner);
+      threadRunCount.incrementAndGet();
+      cleanerThread.start();
+    }
+  }
+
+  @VisibleForTesting
+  public int getThreadRunCount() {
+    return threadRunCount.get();
+  }
+
+  @VisibleForTesting
+  public int getCloseCount() {
+    return mapCount.get();
+  }
+}
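The throttling idea in close() above -- only re-queue a close command for a container once more than MULTIPLIER * reportInterval seconds have passed since the last one -- can be captured in a few lines. The following is a self-contained sketch of just that check; the class name, method names, and the externally supplied clock are illustrative, not part of the patch.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public final class CloseThrottleSketch {
  private static final long MULTIPLIER = 3L;
  private final long reportIntervalSeconds;
  private final Map<String, Long> lastIssuedSeconds = new ConcurrentHashMap<>();

  CloseThrottleSketch(long reportIntervalSeconds) {
    this.reportIntervalSeconds = reportIntervalSeconds;
  }

  /** Returns true if a close command should be sent now for this container. */
  boolean shouldIssue(String containerName, long nowSeconds) {
    Long last = lastIssuedSeconds.get(containerName);
    if (last != null
        && nowSeconds <= last + MULTIPLIER * reportIntervalSeconds) {
      return false;                 // issued recently, wait for the report
    }
    lastIssuedSeconds.put(containerName, nowSeconds);
    return true;
  }

  public static void main(String[] args) {
    CloseThrottleSketch throttle = new CloseThrottleSketch(60);
    System.out.println(throttle.shouldIssue("c1", 0));    // true
    System.out.println(throttle.shouldIssue("c1", 100));  // false, < 180s later
    System.out.println(throttle.shouldIssue("c1", 200));  // true, > 180s later
  }
}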
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java
new file mode 100644
index 0000000..ee02bbd
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/closer/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+/**
+ * This package has classes that close a container, that is, move a container
+ * from the open state to the closed state.
+ */
+package org.apache.hadoop.hdds.scm.container.closer;
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
new file mode 100644
index 0000000..3f8d056
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container;
+/**
+ * This package contains routines to manage the container location and
+ * mapping inside SCM
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java
new file mode 100644
index 0000000..5d91ac5
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/ContainerPlacementPolicy.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.algorithms;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * A ContainerPlacementPolicy supports choosing datanodes to build a
+ * replication pipeline with specified constraints.
+ */
+public interface ContainerPlacementPolicy {
+
+  /**
+   * Given the replication factor and size required, return a set of datanodes
+   * that satisfies the node count and size requirement.
+   * @param nodesRequired - number of datanodes required.
+   * @param sizeRequired - size required for the container or block.
+   * @return list of datanodes chosen.
+   * @throws IOException
+   */
+  List<DatanodeDetails> chooseDatanodes(int nodesRequired, long sizeRequired)
+      throws IOException;
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java
new file mode 100644
index 0000000..0a595d5
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMCommonPolicy.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.algorithms;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+import java.util.stream.Collectors;
+
+/**
+ * SCMCommonPolicy implements a set of invariants which are common
+ * to all container placement policies and acts as a repository of helper
+ * functions which are common to placement policies.
+ */
+public abstract class SCMCommonPolicy implements ContainerPlacementPolicy {
+  @VisibleForTesting
+  static final Logger LOG =
+      LoggerFactory.getLogger(SCMCommonPolicy.class);
+  private final NodeManager nodeManager;
+  private final Random rand;
+  private final Configuration conf;
+
+  /**
+   * Constructs SCM Common Policy Class.
+   *
+   * @param nodeManager NodeManager
+   * @param conf Configuration class.
+   */
+  public SCMCommonPolicy(NodeManager nodeManager, Configuration conf) {
+    this.nodeManager = nodeManager;
+    this.rand = new Random();
+    this.conf = conf;
+  }
+
+  /**
+   * Return node manager.
+   *
+   * @return node manager
+   */
+  public NodeManager getNodeManager() {
+    return nodeManager;
+  }
+
+  /**
+   * Returns the Random Object.
+   *
+   * @return rand
+   */
+  public Random getRand() {
+    return rand;
+  }
+
+  /**
+   * Get Config.
+   *
+   * @return Configuration
+   */
+  public Configuration getConf() {
+    return conf;
+  }
+
+  /**
+   * Given the replication factor and size required, return a set of datanodes
+   * that satisfies the node count and size requirement.
+   * <p>
+   * Here are some invariants of container placement.
+   * <p>
+   * 1. We place containers only on healthy nodes.
+   * 2. We place containers on nodes with enough space for that container.
+   * 3. If a set of nodes is requested, we either meet the required
+   * number of nodes or we fail that request.
+   *
+   * @param nodesRequired - number of datanodes required.
+   * @param sizeRequired - size required for the container or block.
+   * @return list of datanodes chosen.
+   * @throws SCMException SCM exception.
+   */
+
+  public List<DatanodeDetails> chooseDatanodes(int nodesRequired, final long
+      sizeRequired) throws SCMException {
+    List<DatanodeDetails> healthyNodes =
+        nodeManager.getNodes(HddsProtos.NodeState.HEALTHY);
+    String msg;
+    if (healthyNodes.size() == 0) {
+      msg = "No healthy node found to allocate container.";
+      LOG.error(msg);
+      throw new SCMException(msg, SCMException.ResultCodes
+          .FAILED_TO_FIND_HEALTHY_NODES);
+    }
+
+    if (healthyNodes.size() < nodesRequired) {
+      msg = String.format("Not enough healthy nodes to allocate container. %d "
+              + " datanodes required. Found %d",
+          nodesRequired, healthyNodes.size());
+      LOG.error(msg);
+      throw new SCMException(msg,
+          SCMException.ResultCodes.FAILED_TO_FIND_SUITABLE_NODE);
+    }
+    List<DatanodeDetails> healthyList = healthyNodes.stream().filter(d ->
+        hasEnoughSpace(d, sizeRequired)).collect(Collectors.toList());
+
+    if (healthyList.size() < nodesRequired) {
+      msg = String.format("Unable to find enough nodes that meet the space " +
+              "requirement of %d bytes in healthy node set." +
+              " Nodes required: %d Found: %d",
+          sizeRequired, nodesRequired, healthyList.size());
+      LOG.error(msg);
+      throw new SCMException(msg,
+          SCMException.ResultCodes.FAILED_TO_FIND_NODES_WITH_SPACE);
+    }
+
+    return healthyList;
+  }
+
+  /**
+   * Returns true if this node has enough space to meet our requirement.
+   *
+   * @param datanodeDetails DatanodeDetails
+   * @param sizeRequired size required for the container or block.
+   * @return true if we have enough space.
+   */
+  private boolean hasEnoughSpace(DatanodeDetails datanodeDetails,
+                                 long sizeRequired) {
+    SCMNodeMetric nodeMetric = nodeManager.getNodeStat(datanodeDetails);
+    return (nodeMetric != null) && nodeMetric.get().getRemaining()
+        .hasResources(sizeRequired);
+  }
+
+  /**
+   * This function invokes the derived class's chooseNode function to build a
+   * list of nodes. Then it verifies that the invoked policy was able to return
+   * the expected number of nodes.
+   *
+   * @param nodesRequired - Nodes Required
+   * @param healthyNodes - List of Nodes in the result set.
+   * @return List of Datanodes that can be used for placement.
+   * @throws SCMException
+   */
+  public List<DatanodeDetails> getResultSet(
+      int nodesRequired, List<DatanodeDetails> healthyNodes)
+      throws SCMException {
+    List<DatanodeDetails> results = new LinkedList<>();
+    for (int x = 0; x < nodesRequired; x++) {
+      // invoke the choose function defined in the derived classes.
+      DatanodeDetails nodeId = chooseNode(healthyNodes);
+      if (nodeId != null) {
+        results.add(nodeId);
+      }
+    }
+
+    if (results.size() < nodesRequired) {
+      LOG.error("Unable to find the required number of healthy nodes that " +
+              "meet the criteria. Required nodes: {}, Found nodes: {}",
+          nodesRequired, results.size());
+      throw new SCMException("Unable to find required number of nodes.",
+          SCMException.ResultCodes.FAILED_TO_FIND_SUITABLE_NODE);
+    }
+    return results;
+  }
+
+  /**
+   * Chooses a datanode according to the policy; this function is implemented
+   * by the concrete policy classes, for example SCMContainerPlacementCapacity
+   * or SCMContainerPlacementRandom.
+   *
+   * @param healthyNodes - Set of healthy nodes we can choose from.
+   * @return DatanodeDetails
+   */
+  public abstract DatanodeDetails chooseNode(
+      List<DatanodeDetails> healthyNodes);
+
+
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java
new file mode 100644
index 0000000..85a6b54
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementCapacity.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.algorithms;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Container placement policy that randomly chooses datanodes with enough
+ * remaining space to satisfy the size constraints.
+ * <p>
+ * The algorithm is as follows: pick two random nodes from a given pool of
+ * nodes and then pick the node with the lower utilization. This gives nodes
+ * with lower utilization a higher probability of being picked.
+ * <p>
+ * For those wondering why we choose two nodes randomly and keep the one with
+ * lower utilization: there are links to the original papers in HDFS-11564.
+ * <p>
+ * A brief summary: rank the nodes on a scale from lowest utilized to highest
+ * utilized. There are (s * (s - 1)) / 2 ways to build distinct pairs of
+ * nodes, and there are s - k pairs in which the rank-k node is the less
+ * utilized of the pair. So the probability of picking the rank-k node is
+ * (2 * (s - k)) / (s * (s - 1)).
+ * <p>
+ * In plain English, there is a much higher probability of picking less
+ * utilized nodes than nodes with higher utilization, since we pick two nodes
+ * and then keep the one with lower utilization.
+ * <p>
+ * This avoids the issue of users adding new nodes into the cluster and HDFS
+ * sending all traffic to those nodes, which is what would happen with a
+ * purely capacity-based allocation scheme. Unless the newer nodes are part
+ * of the randomly chosen pair, they will not be in the running to get the
+ * container.
+ * <p>
+ * This leads to an I/O pattern where the lower utilized nodes are favoured
+ * more than the higher utilized nodes, but part of the I/O will still go to
+ * the older, higher utilized nodes.
+ * <p>
+ * With this algorithm in place, our hope is that the balancer tool needs to
+ * do little or no work and the cluster will achieve a balanced distribution
+ * over time.
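+ * <p>
+ * A worked example (illustrative): with s = 10 nodes ranked 1 (least
+ * utilized) to 10 (most utilized), the probability of picking the rank-1
+ * node is (2 * (10 - 1)) / (10 * 9) = 0.2, while the probability of picking
+ * the rank-9 node is (2 * (10 - 9)) / (10 * 9), roughly 0.022.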
+ */
+public final class SCMContainerPlacementCapacity extends SCMCommonPolicy {
+  @VisibleForTesting
+  static final Logger LOG =
+      LoggerFactory.getLogger(SCMContainerPlacementCapacity.class);
+
+  /**
+   * Constructs a Container Placement policy that considers only capacity.
+   * That is, this policy tries to place containers based on node weight.
+   *
+   * @param nodeManager Node Manager
+   * @param conf Configuration
+   */
+  public SCMContainerPlacementCapacity(final NodeManager nodeManager,
+      final Configuration conf) {
+    super(nodeManager, conf);
+  }
+
+  /**
+   * Called by SCM to choose datanodes.
+   *
+   * @param nodesRequired - number of datanodes required.
+   * @param sizeRequired - size required for the container or block.
+   * @return List of datanodes.
+   * @throws SCMException  SCMException
+   */
+  @Override
+  public List<DatanodeDetails> chooseDatanodes(
+      final int nodesRequired, final long sizeRequired) throws SCMException {
+    List<DatanodeDetails> healthyNodes =
+        super.chooseDatanodes(nodesRequired, sizeRequired);
+    if (healthyNodes.size() == nodesRequired) {
+      return healthyNodes;
+    }
+    return getResultSet(nodesRequired, healthyNodes);
+  }
+
+  /**
+   * Find a node from the healthy list and return it after removing it from the
+   * list that we are operating on.
+   *
+   * @param healthyNodes - List of healthy nodes that meet the size
+   * requirement.
+   * @return DatanodeDetails that is chosen.
+   */
+  @Override
+  public DatanodeDetails chooseNode(List<DatanodeDetails> healthyNodes) {
+    int firstNodeNdx = getRand().nextInt(healthyNodes.size());
+    int secondNodeNdx = getRand().nextInt(healthyNodes.size());
+
+    DatanodeDetails datanodeDetails;
+    // There is a possibility that both numbers will be same.
+    // if that is so, we just return the node.
+    if (firstNodeNdx == secondNodeNdx) {
+      datanodeDetails = healthyNodes.get(firstNodeNdx);
+    } else {
+      DatanodeDetails firstNodeDetails = healthyNodes.get(firstNodeNdx);
+      DatanodeDetails secondNodeDetails = healthyNodes.get(secondNodeNdx);
+      SCMNodeMetric firstNodeMetric =
+          getNodeManager().getNodeStat(firstNodeDetails);
+      SCMNodeMetric secondNodeMetric =
+          getNodeManager().getNodeStat(secondNodeDetails);
+      datanodeDetails = firstNodeMetric.isGreater(secondNodeMetric.get())
+          ? firstNodeDetails : secondNodeDetails;
+    }
+    healthyNodes.remove(datanodeDetails);
+    return datanodeDetails;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java
new file mode 100644
index 0000000..9903c84
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/SCMContainerPlacementRandom.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.algorithms;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Container placement policy that randomly chooses healthy datanodes.
+ * This is very similar to the current HDFS placement: we just randomly
+ * place containers without any consideration of utilization.
+ * <p>
+ * That means we rely on the balancer to achieve an even distribution of
+ * data. The balancer will need to support containers as a feature before
+ * this class can be practically used.
+ */
+public final class SCMContainerPlacementRandom extends SCMCommonPolicy
+    implements ContainerPlacementPolicy {
+  @VisibleForTesting
+  static final Logger LOG =
+      LoggerFactory.getLogger(SCMContainerPlacementRandom.class);
+
+  /**
+   * Constructs a random Container Placement policy.
+   *
+   * @param nodeManager nodeManager
+   * @param conf Config
+   */
+  public SCMContainerPlacementRandom(final NodeManager nodeManager,
+      final Configuration conf) {
+    super(nodeManager, conf);
+  }
+
+  /**
+   * Called by SCM to choose datanodes.
+   *
+   * @param nodesRequired - number of datanodes required.
+   * @param sizeRequired - size required for the container or block.
+   * @return List of Datanodes.
+   * @throws SCMException  SCMException
+   */
+  @Override
+  public List<DatanodeDetails> chooseDatanodes(
+      final int nodesRequired, final long sizeRequired) throws SCMException {
+    List<DatanodeDetails> healthyNodes =
+        super.chooseDatanodes(nodesRequired, sizeRequired);
+
+    if (healthyNodes.size() == nodesRequired) {
+      return healthyNodes;
+    }
+    return getResultSet(nodesRequired, healthyNodes);
+  }
+
+  /**
+   * Just choose a node randomly and remove it from the set of nodes we can
+   * choose from.
+   *
+   * @param healthyNodes - all healthy datanodes.
+   * @return one randomly chosen datanode from the list of healthy nodes.
+   */
+  public DatanodeDetails chooseNode(final List<DatanodeDetails> healthyNodes) {
+    DatanodeDetails selectedNode =
+        healthyNodes.get(getRand().nextInt(healthyNodes.size()));
+    healthyNodes.remove(selectedNode);
+    return selectedNode;
+  }
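+
+  /*
+   * Illustrative usage sketch (an assumption, not part of this patch); it
+   * presumes a NodeManager instance named nodeManager is available:
+   *
+   *   SCMContainerPlacementRandom policy =
+   *       new SCMContainerPlacementRandom(nodeManager, new Configuration());
+   *   List<DatanodeDetails> targets =
+   *       policy.chooseDatanodes(3, 5L * 1024 * 1024 * 1024);
+   */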
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java
new file mode 100644
index 0000000..1cb810d
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.placement.algorithms;
+// Various placement algorithms.
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java
new file mode 100644
index 0000000..b8e8998
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/ContainerStat.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+
+/**
+ * This class represents the SCM container stat.
+ */
+public class ContainerStat {
+  /**
+   * The maximum container size.
+   */
+  @JsonProperty("Size")
+  private LongMetric size;
+
+  /**
+   * The number of bytes used by the container.
+   */
+  @JsonProperty("Used")
+  private LongMetric used;
+
+  /**
+   * The number of keys in the container.
+   */
+  @JsonProperty("KeyCount")
+  private LongMetric keyCount;
+
+  /**
+   * The number of bytes read from the container.
+   */
+  @JsonProperty("ReadBytes")
+  private LongMetric readBytes;
+
+  /**
+   * The number of bytes written into the container.
+   */
+  @JsonProperty("WriteBytes")
+  private LongMetric writeBytes;
+
+  /**
+   * The number of times the container is read.
+   */
+  @JsonProperty("ReadCount")
+  private LongMetric readCount;
+
+  /**
+   * The number of times the container is written into.
+   */
+  @JsonProperty("WriteCount")
+  private LongMetric writeCount;
+
+  public ContainerStat() {
+    this(0L, 0L, 0L, 0L, 0L, 0L, 0L);
+  }
+
+  public ContainerStat(long size, long used, long keyCount, long readBytes,
+      long writeBytes, long readCount, long writeCount) {
+    Preconditions.checkArgument(size >= 0,
+        "Container size cannot be negative.");
+    Preconditions.checkArgument(used >= 0,
+        "Used space cannot be negative.");
+    Preconditions.checkArgument(keyCount >= 0,
+        "Key count cannot be negative.");
+    Preconditions.checkArgument(readBytes >= 0,
+        "Read bytes cannot be negative.");
+    Preconditions.checkArgument(writeBytes >= 0,
+        "Write bytes cannot be negative.");
+    Preconditions.checkArgument(readCount >= 0,
+        "Read count cannot be negative.");
+    Preconditions.checkArgument(writeCount >= 0,
+        "Write count cannot be negative.");
+
+    this.size = new LongMetric(size);
+    this.used = new LongMetric(used);
+    this.keyCount = new LongMetric(keyCount);
+    this.readBytes = new LongMetric(readBytes);
+    this.writeBytes = new LongMetric(writeBytes);
+    this.readCount = new LongMetric(readCount);
+    this.writeCount = new LongMetric(writeCount);
+  }
+
+  public LongMetric getSize() {
+    return size;
+  }
+
+  public LongMetric getUsed() {
+    return used;
+  }
+
+  public LongMetric getKeyCount() {
+    return keyCount;
+  }
+
+  public LongMetric getReadBytes() {
+    return readBytes;
+  }
+
+  public LongMetric getWriteBytes() {
+    return writeBytes;
+  }
+
+  public LongMetric getReadCount() {
+    return readCount;
+  }
+
+  public LongMetric getWriteCount() {
+    return writeCount;
+  }
+
+  public void add(ContainerStat stat) {
+    if (stat == null) {
+      return;
+    }
+
+    this.size.add(stat.getSize().get());
+    this.used.add(stat.getUsed().get());
+    this.keyCount.add(stat.getKeyCount().get());
+    this.readBytes.add(stat.getReadBytes().get());
+    this.writeBytes.add(stat.getWriteBytes().get());
+    this.readCount.add(stat.getReadCount().get());
+    this.writeCount.add(stat.getWriteCount().get());
+  }
+
+  public void subtract(ContainerStat stat) {
+    if (stat == null) {
+      return;
+    }
+
+    this.size.subtract(stat.getSize().get());
+    this.used.subtract(stat.getUsed().get());
+    this.keyCount.subtract(stat.getKeyCount().get());
+    this.readBytes.subtract(stat.getReadBytes().get());
+    this.writeBytes.subtract(stat.getWriteBytes().get());
+    this.readCount.subtract(stat.getReadCount().get());
+    this.writeCount.subtract(stat.getWriteCount().get());
+  }
+
+  public String toJsonString() {
+    try {
+      return JsonUtils.toJsonString(this);
+    } catch (IOException ignored) {
+      return null;
+    }
+  }
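+
+  /*
+   * Illustrative usage sketch (an assumption, not part of this patch):
+   *
+   *   ContainerStat stat = new ContainerStat(5L, 1L, 10L, 0L, 0L, 0L, 0L);
+   *   stat.add(new ContainerStat(5L, 2L, 10L, 0L, 0L, 0L, 0L));
+   *   String json = stat.toJsonString(); // aggregated stat rendered as JSON
+   */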
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java
new file mode 100644
index 0000000..a6e732c
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/DatanodeMetric.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+
+/**
+ * DatanodeMetric acts as the basis for all the metrics that are used in
+ * comparing two datanodes.
+ */
+public interface DatanodeMetric<T, S> extends Comparable<T> {
+
+  /**
+   * Some syntactic sugar over Comparable interface. This makes code easier to
+   * read.
+   *
+   * @param o - Other Object
+   * @return - True if *this* object is greater than argument.
+   */
+  boolean isGreater(T o);
+
+  /**
+   * Inverse of isGreater.
+   *
+   * @param o - other object.
+   * @return True if *this* object is Lesser than argument.
+   */
+  boolean isLess(T o);
+
+  /**
+   * Returns true if the objects have the same values. Because of issues with
+   * equals and loss of type information, this interface supports isEqual.
+   *
+   * @param o object to compare.
+   * @return True, if the values match.
+   */
+  boolean isEqual(T o);
+
+  /**
+   * A resource check, defined by resourceNeeded.
+   * For example, S could be the number of bytes required, and the
+   * DatanodeMetric can reply whether that requirement can be met.
+   *
+   * @param resourceNeeded -  ResourceNeeded in its own metric.
+   * @return boolean, True if this resource requirement can be met.
+   */
+  boolean hasResources(S resourceNeeded) throws SCMException;
+
+  /**
+   * Returns the metric.
+   *
+   * @return T, the object that represents this metric.
+   */
+  T get();
+
+  /**
+   * Sets the value of this metric.
+   *
+   * @param value - value of the metric.
+   */
+  void set(T value);
+
+  /**
+   * Adds a value to the base.
+   * @param value - value
+   */
+  void add(T value);
+
+  /**
+   * subtract a value.
+   * @param value value
+   */
+  void subtract(T value);
+
+
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java
new file mode 100644
index 0000000..050d26b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/LongMetric.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+
+/**
+ * A helper class for all metrics based on Longs.
+ */
+@JsonAutoDetect(fieldVisibility = Visibility.ANY)
+public class LongMetric implements DatanodeMetric<Long, Long> {
+  private Long value;
+
+  /**
+   * Constructs a long Metric.
+   *
+   * @param value Value for this metric.
+   */
+  public LongMetric(Long value) {
+    this.value = value;
+  }
+
+  /**
+   * Some syntactic sugar over Comparable interface. This makes code easier to
+   * read.
+   *
+   * @param o - Other Object
+   * @return - True if *this* object is greater than argument.
+   */
+  @Override
+  public boolean isGreater(Long o) {
+    return compareTo(o) > 0;
+  }
+
+  /**
+   * Inverse of isGreater.
+   *
+   * @param o - other object.
+   * @return True if *this* object is Lesser than argument.
+   */
+  @Override
+  public boolean isLess(Long o) {
+    return compareTo(o) < 0;
+  }
+
+  /**
+   * Returns true if the objects have the same values. Because of issues with
+   * equals and loss of type information, this interface supports isEqual.
+   *
+   * @param o object to compare.
+   * @return True, if the values match.
+   */
+  @Override
+  public boolean isEqual(Long o) {
+    return compareTo(o) == 0;
+  }
+
+  /**
+   * A resource check, defined by resourceNeeded.
+   * For example, S could be the number of bytes required, and the
+   * DatanodeMetric can reply whether that requirement can be met.
+   *
+   * @param resourceNeeded -  ResourceNeeded in its own metric.
+   * @return boolean, True if this resource requirement can be met.
+   */
+  @Override
+  public boolean hasResources(Long resourceNeeded) {
+    return isGreater(resourceNeeded);
+  }
+
+  /**
+   * Returns the metric.
+   *
+   * @return T, the object that represents this metric.
+   */
+  @Override
+  public Long get() {
+    return this.value;
+  }
+
+  /**
+   * Sets the value of this metric.
+   *
+   * @param setValue - value of the metric.
+   */
+  @Override
+  public void set(Long setValue) {
+    this.value = setValue;
+
+  }
+
+  /**
+   * Adds a value to the base.
+   *
+   * @param addValue - value
+   */
+  @Override
+  public void add(Long addValue) {
+    this.value += addValue;
+  }
+
+  /**
+   * subtract a value.
+   *
+   * @param subValue value
+   */
+  @Override
+  public void subtract(Long subValue) {
+    this.value -= subValue;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   * @return a negative integer, zero, or a positive integer as this object is
+   * less than, equal to, or greater than the specified object.
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException   if the specified object's type prevents it
+   *                              from being compared to this object.
+   */
+  @Override
+  public int compareTo(Long o) {
+    return Long.compare(this.value, o);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    LongMetric that = (LongMetric) o;
+
+    return value != null ? value.equals(that.value) : that.value == null;
+  }
+
+  @Override
+  public int hashCode() {
+    return value != null ? value.hashCode() : 0;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java
new file mode 100644
index 0000000..d6857d3
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/NodeStat.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Interface that defines Node Stats.
+ */
+interface NodeStat {
+  /**
+   * Get capacity of the node.
+   * @return capacity of the node.
+   */
+  LongMetric getCapacity();
+
+  /**
+   * Get the used space of the node.
+   * @return the used space of the node.
+   */
+  LongMetric getScmUsed();
+
+  /**
+   * Get the remaining space of the node.
+   * @return the remaining space of the node.
+   */
+  LongMetric getRemaining();
+
+  /**
+   * Set the total/used/remaining space.
+   * @param capacity - total space.
+   * @param used - used space.
+   * @param remain - remaining space.
+   */
+  @VisibleForTesting
+  void set(long capacity, long used, long remain);
+
+  /**
+   * Adds the given stat to this stat.
+   * @param stat - stat to be added.
+   * @return updated node stat.
+   */
+  NodeStat add(NodeStat stat);
+
+  /**
+   * Subtracts the given stat from this stat.
+   * @param stat - stat to be subtracted.
+   * @return updated node stat.
+   */
+  NodeStat subtract(NodeStat stat);
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java
new file mode 100644
index 0000000..e4dd9aa
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMMetrics.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
+
+/**
+ * This class is for maintaining StorageContainerManager statistics.
+ */
+@Metrics(about="Storage Container Manager Metrics", context="dfs")
+public class SCMMetrics {
+  public static final String SOURCE_NAME =
+      SCMMetrics.class.getSimpleName();
+
+  /**
+   * Container stat metrics; the meaning of the following metrics
+   * can be found in {@link ContainerStat}.
+   */
+  @Metric private MutableGaugeLong lastContainerReportSize;
+  @Metric private MutableGaugeLong lastContainerReportUsed;
+  @Metric private MutableGaugeLong lastContainerReportKeyCount;
+  @Metric private MutableGaugeLong lastContainerReportReadBytes;
+  @Metric private MutableGaugeLong lastContainerReportWriteBytes;
+  @Metric private MutableGaugeLong lastContainerReportReadCount;
+  @Metric private MutableGaugeLong lastContainerReportWriteCount;
+
+  @Metric private MutableCounterLong containerReportSize;
+  @Metric private MutableCounterLong containerReportUsed;
+  @Metric private MutableCounterLong containerReportKeyCount;
+  @Metric private MutableCounterLong containerReportReadBytes;
+  @Metric private MutableCounterLong containerReportWriteBytes;
+  @Metric private MutableCounterLong containerReportReadCount;
+  @Metric private MutableCounterLong containerReportWriteCount;
+
+  public SCMMetrics() {
+  }
+
+  public static SCMMetrics create() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    return ms.register(SOURCE_NAME, "Storage Container Manager Metrics",
+        new SCMMetrics());
+  }
+
+  public void setLastContainerReportSize(long size) {
+    this.lastContainerReportSize.set(size);
+  }
+
+  public void setLastContainerReportUsed(long used) {
+    this.lastContainerReportUsed.set(used);
+  }
+
+  public void setLastContainerReportKeyCount(long keyCount) {
+    this.lastContainerReportKeyCount.set(keyCount);
+  }
+
+  public void setLastContainerReportReadBytes(long readBytes) {
+    this.lastContainerReportReadBytes.set(readBytes);
+  }
+
+  public void setLastContainerReportWriteBytes(long writeBytes) {
+    this.lastContainerReportWriteBytes.set(writeBytes);
+  }
+
+  public void setLastContainerReportReadCount(long readCount) {
+    this.lastContainerReportReadCount.set(readCount);
+  }
+
+  public void setLastContainerReportWriteCount(long writeCount) {
+    this.lastContainerReportWriteCount.set(writeCount);
+  }
+
+  public void incrContainerReportSize(long size) {
+    this.containerReportSize.incr(size);
+  }
+
+  public void incrContainerReportUsed(long used) {
+    this.containerReportUsed.incr(used);
+  }
+
+  public void incrContainerReportKeyCount(long keyCount) {
+    this.containerReportKeyCount.incr(keyCount);
+  }
+
+  public void incrContainerReportReadBytes(long readBytes) {
+    this.containerReportReadBytes.incr(readBytes);
+  }
+
+  public void incrContainerReportWriteBytes(long writeBytes) {
+    this.containerReportWriteBytes.incr(writeBytes);
+  }
+
+  public void incrContainerReportReadCount(long readCount) {
+    this.containerReportReadCount.incr(readCount);
+  }
+
+  public void incrContainerReportWriteCount(long writeCount) {
+    this.containerReportWriteCount.incr(writeCount);
+  }
+
+  public void setLastContainerStat(ContainerStat newStat) {
+    this.lastContainerReportSize.set(newStat.getSize().get());
+    this.lastContainerReportUsed.set(newStat.getUsed().get());
+    this.lastContainerReportKeyCount.set(newStat.getKeyCount().get());
+    this.lastContainerReportReadBytes.set(newStat.getReadBytes().get());
+    this.lastContainerReportWriteBytes.set(newStat.getWriteBytes().get());
+    this.lastContainerReportReadCount.set(newStat.getReadCount().get());
+    this.lastContainerReportWriteCount.set(newStat.getWriteCount().get());
+  }
+
+  public void incrContainerStat(ContainerStat deltaStat) {
+    this.containerReportSize.incr(deltaStat.getSize().get());
+    this.containerReportUsed.incr(deltaStat.getUsed().get());
+    this.containerReportKeyCount.incr(deltaStat.getKeyCount().get());
+    this.containerReportReadBytes.incr(deltaStat.getReadBytes().get());
+    this.containerReportWriteBytes.incr(deltaStat.getWriteBytes().get());
+    this.containerReportReadCount.incr(deltaStat.getReadCount().get());
+    this.containerReportWriteCount.incr(deltaStat.getWriteCount().get());
+  }
+
+  public void decrContainerStat(ContainerStat deltaStat) {
+    this.containerReportSize.incr(-1 * deltaStat.getSize().get());
+    this.containerReportUsed.incr(-1 * deltaStat.getUsed().get());
+    this.containerReportKeyCount.incr(-1 * deltaStat.getKeyCount().get());
+    this.containerReportReadBytes.incr(-1 * deltaStat.getReadBytes().get());
+    this.containerReportWriteBytes.incr(-1 * deltaStat.getWriteBytes().get());
+    this.containerReportReadCount.incr(-1 * deltaStat.getReadCount().get());
+    this.containerReportWriteCount.incr(-1 * deltaStat.getWriteCount().get());
+  }
+
+  public void unRegister() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    ms.unregisterSource(SOURCE_NAME);
+  }
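+
+  /*
+   * Illustrative usage sketch (an assumption, not part of this patch):
+   *
+   *   SCMMetrics metrics = SCMMetrics.create();
+   *   metrics.setLastContainerStat(new ContainerStat(5L, 1L, 10L, 0L, 0L,
+   *       0L, 0L));
+   *   metrics.incrContainerStat(new ContainerStat(0L, 1L, 2L, 0L, 0L, 0L,
+   *       0L));
+   *   metrics.unRegister();
+   */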
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java
new file mode 100644
index 0000000..b50376d
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeMetric.java
@@ -0,0 +1,223 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+
+/**
+ * SCM Node Metric that is used in the placement classes.
+ */
+public class SCMNodeMetric implements DatanodeMetric<SCMNodeStat, Long> {
+  private SCMNodeStat stat;
+
+  /**
+   * Constructs an SCMNode Metric.
+   *
+   * @param stat - SCMNodeStat.
+   */
+  public SCMNodeMetric(SCMNodeStat stat) {
+    this.stat = stat;
+  }
+
+  /**
+   * Constructs an SCMNodeMetric from the capacity, used and remaining space
+   * of a datanode.
+   *
+   * @param capacity in bytes
+   * @param used in bytes
+   * @param remaining in bytes
+   */
+  @VisibleForTesting
+  public SCMNodeMetric(long capacity, long used, long remaining) {
+    this.stat = new SCMNodeStat();
+    this.stat.set(capacity, used, remaining);
+  }
+
+  /**
+   * Compares node utilization (scmUsed / capacity); when the utilizations
+   * are nearly equal, the node with more remaining space is considered
+   * greater.
+   *
+   * @param o - Other Object
+   * @return - True if *this* object is greater than argument.
+   */
+  @Override
+  public boolean isGreater(SCMNodeStat o) {
+    Preconditions.checkNotNull(o, "Argument cannot be null");
+
+    // if zero, replace with 1 for the division to work.
+    long thisDenominator = (this.stat.getCapacity().get() == 0)
+        ? 1 : this.stat.getCapacity().get();
+    long otherDenominator = (o.getCapacity().get() == 0)
+        ? 1 : o.getCapacity().get();
+
+    float thisNodeWeight =
+        stat.getScmUsed().get() / (float) thisDenominator;
+
+    float oNodeWeight =
+        o.getScmUsed().get() / (float) otherDenominator;
+
+    if (Math.abs(thisNodeWeight - oNodeWeight) > 0.000001) {
+      return thisNodeWeight > oNodeWeight;
+    }
+    // if these nodes have a similar weight then return the node with more
+    // free space as the greater node.
+    return stat.getRemaining().isGreater(o.getRemaining().get());
+  }
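+
+  // Illustrative example: a node with scmUsed = 50 and capacity = 100 has
+  // weight 0.5, while a node with scmUsed = 10 and capacity = 100 has
+  // weight 0.1, so the first node is "greater" (more utilized) than the
+  // second.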
+
+  /**
+   * Inverse of isGreater.
+   *
+   * @param o - other object.
+   * @return True if *this* object is Lesser than argument.
+   */
+  @Override
+  public boolean isLess(SCMNodeStat o) {
+    Preconditions.checkNotNull(o, "Argument cannot be null");
+
+    // if zero, replace with 1 for the division to work.
+    long thisDenominator = (this.stat.getCapacity().get() == 0)
+        ? 1 : this.stat.getCapacity().get();
+    long otherDenominator = (o.getCapacity().get() == 0)
+        ? 1 : o.getCapacity().get();
+
+    float thisNodeWeight =
+        stat.getScmUsed().get() / (float) thisDenominator;
+
+    float oNodeWeight =
+        o.getScmUsed().get() / (float) otherDenominator;
+
+    if (Math.abs(thisNodeWeight - oNodeWeight) > 0.000001) {
+      return thisNodeWeight < oNodeWeight;
+    }
+
+    // if these nodes have a similar weight then return the node with less
+    // free space as the lesser node.
+    return stat.getRemaining().isLess(o.getRemaining().get());
+  }
+
+  /**
+   * Returns true if the objects have the same values. Because of issues with
+   * equals and loss of type information, this interface supports isEqual.
+   *
+   * @param o object to compare.
+   * @return True, if the values match.
+   * TODO : Consider if it makes sense to add remaining to this equation.
+   */
+  @Override
+  public boolean isEqual(SCMNodeStat o) {
+    float thisNodeWeight = stat.getScmUsed().get() / (float)
+        stat.getCapacity().get();
+    float oNodeWeight = o.getScmUsed().get() / (float) o.getCapacity().get();
+    return Math.abs(thisNodeWeight - oNodeWeight) < 0.000001;
+  }
+
+  /**
+   * A resource check, defined by resourceNeeded.
+   * For example, S could be the number of bytes required, and the
+   * DatanodeMetric can reply whether that requirement can be met.
+   *
+   * @param resourceNeeded -  ResourceNeeded in its own metric.
+   * @return boolean, True if this resource requirement can be met.
+   */
+  @Override
+  public boolean hasResources(Long resourceNeeded) {
+    return false;
+  }
+
+  /**
+   * Returns the metric.
+   *
+   * @return T, the object that represents this metric.
+   */
+  @Override
+  public SCMNodeStat get() {
+    return stat;
+  }
+
+  /**
+   * Sets the value of this metric.
+   *
+   * @param value - value of the metric.
+   */
+  @Override
+  public void set(SCMNodeStat value) {
+    stat.set(value.getCapacity().get(), value.getScmUsed().get(),
+        value.getRemaining().get());
+  }
+
+  /**
+   * Adds a value to the base.
+   *
+   * @param value - value
+   */
+  @Override
+  public void add(SCMNodeStat value) {
+    stat.add(value);
+  }
+
+  /**
+   * subtract a value.
+   *
+   * @param value value
+   */
+  @Override
+  public void subtract(SCMNodeStat value) {
+    stat.subtract(value);
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   * @return a negative integer, zero, or a positive integer as this object is
+   * less than, equal to, or greater than the specified object.
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException   if the specified object's type prevents it
+   *                              from being compared to this object.
+   */
+  @Override
+  public int compareTo(SCMNodeStat o) {
+    if (isEqual(o)) {
+      return 0;
+    }
+    if (isGreater(o)) {
+      return 1;
+    } else {
+      return -1;
+    }
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    SCMNodeMetric that = (SCMNodeMetric) o;
+
+    return stat != null ? stat.equals(that.stat) : that.stat == null;
+  }
+
+  @Override
+  public int hashCode() {
+    return stat != null ? stat.hashCode() : 0;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java
new file mode 100644
index 0000000..3c871d3
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/SCMNodeStat.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+
+/**
+ * This class represents the SCM node stat.
+ */
+public class SCMNodeStat implements NodeStat {
+  private LongMetric capacity;
+  private LongMetric scmUsed;
+  private LongMetric remaining;
+
+  public SCMNodeStat() {
+    this(0L, 0L, 0L);
+  }
+
+  public SCMNodeStat(SCMNodeStat other) {
+    this(other.capacity.get(), other.scmUsed.get(), other.remaining.get());
+  }
+
+  public SCMNodeStat(long capacity, long used, long remaining) {
+    Preconditions.checkArgument(capacity >= 0, "Capacity cannot be " +
+        "negative.");
+    Preconditions.checkArgument(used >= 0, "used space cannot be " +
+        "negative.");
+    Preconditions.checkArgument(remaining >= 0, "remaining cannot be " +
+        "negative");
+    this.capacity = new LongMetric(capacity);
+    this.scmUsed = new LongMetric(used);
+    this.remaining = new LongMetric(remaining);
+  }
+
+  /**
+   * @return the total configured capacity of the node.
+   */
+  public LongMetric getCapacity() {
+    return capacity;
+  }
+
+  /**
+   * @return the total SCM used space on the node.
+   */
+  public LongMetric getScmUsed() {
+    return scmUsed;
+  }
+
+  /**
+   * @return the total remaining space available on the node.
+   */
+  public LongMetric getRemaining() {
+    return remaining;
+  }
+
+  /**
+   * Set the capacity, used and remaining space on a datanode.
+   *
+   * @param newCapacity in bytes
+   * @param newUsed in bytes
+   * @param newRemaining in bytes
+   */
+  @VisibleForTesting
+  public void set(long newCapacity, long newUsed, long newRemaining) {
+    Preconditions.checkNotNull(newCapacity, "Capacity cannot be null");
+    Preconditions.checkNotNull(newUsed, "used cannot be null");
+    Preconditions.checkNotNull(newRemaining, "remaining cannot be null");
+
+    Preconditions.checkArgument(newCapacity >= 0, "Capacity cannot be " +
+        "negative.");
+    Preconditions.checkArgument(newUsed >= 0, "used space cannot be " +
+        "negative.");
+    Preconditions.checkArgument(newRemaining >= 0, "remaining cannot be " +
+        "negative");
+
+    this.capacity = new LongMetric(newCapacity);
+    this.scmUsed = new LongMetric(newUsed);
+    this.remaining = new LongMetric(newRemaining);
+  }
+
+  /**
+   * Adds a new nodestat to existing values of the node.
+   *
+   * @param stat Nodestat.
+   * @return SCMNodeStat
+   */
+  public SCMNodeStat add(NodeStat stat) {
+    this.capacity.set(this.getCapacity().get() + stat.getCapacity().get());
+    this.scmUsed.set(this.getScmUsed().get() + stat.getScmUsed().get());
+    this.remaining.set(this.getRemaining().get() + stat.getRemaining().get());
+    return this;
+  }
+
+  /**
+   * Subtracts the stat values from the existing NodeStat.
+   *
+   * @param stat SCMNodeStat.
+   * @return Modified SCMNodeStat
+   */
+  public SCMNodeStat subtract(NodeStat stat) {
+    this.capacity.set(this.getCapacity().get() - stat.getCapacity().get());
+    this.scmUsed.set(this.getScmUsed().get() - stat.getScmUsed().get());
+    this.remaining.set(this.getRemaining().get() - stat.getRemaining().get());
+    return this;
+  }
+
+  @Override
+  public boolean equals(Object to) {
+    if (to instanceof SCMNodeStat) {
+      SCMNodeStat tempStat = (SCMNodeStat) to;
+      return capacity.isEqual(tempStat.getCapacity().get()) &&
+          scmUsed.isEqual(tempStat.getScmUsed().get()) &&
+          remaining.isEqual(tempStat.getRemaining().get());
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return Long.hashCode(capacity.get() ^ scmUsed.get() ^ remaining.get());
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java
new file mode 100644
index 0000000..4a81d69
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/metrics/package-info.java
@@ -0,0 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.placement.metrics;
+
+// Various metrics supported by Datanode and used by SCM in the placement
+// strategy.
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java
new file mode 100644
index 0000000..dc54d9b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/placement/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.container.placement;
+// Classes related to container placement.
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java
new file mode 100644
index 0000000..c14303f
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerSupervisor.java
@@ -0,0 +1,343 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.replication;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
+import org.apache.hadoop.util.concurrent.HadoopThreadPoolExecutor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.PriorityQueue;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import static com.google.common.util.concurrent.Uninterruptibles
+    .sleepUninterruptibly;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_MAX_CONTAINER_REPORT_THREADS;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_MAX_CONTAINER_REPORT_THREADS_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS_DEFAULT;
+
+/**
+ * This class takes a set of container reports that belong to a pool and then
+ * computes the replication levels for each container.
+ */
+public class ContainerSupervisor implements Closeable {
+  public static final Logger LOG =
+      LoggerFactory.getLogger(ContainerSupervisor.class);
+
+  private final NodePoolManager poolManager;
+  private final HashSet<String> poolNames;
+  private final PriorityQueue<PeriodicPool> poolQueue;
+  private final NodeManager nodeManager;
+  private final long containerProcessingLag;
+  private final AtomicBoolean runnable;
+  private final ExecutorService executorService;
+  private final long maxPoolWait;
+  private long poolProcessCount;
+  private final List<InProgressPool> inProgressPoolList;
+  private final AtomicInteger threadFaultCount;
+  private final int inProgressPoolMaxCount;
+
+  private final ReadWriteLock inProgressPoolListLock;
+
+  /**
+   * Returns the number of times we have processed pools.
+   * @return long
+   */
+  public long getPoolProcessCount() {
+    return poolProcessCount;
+  }
+
+
+  /**
+   * Constructs a class that computes Replication Levels.
+   *
+   * @param conf - OzoneConfiguration
+   * @param nodeManager - Node Manager
+   * @param poolManager - Pool Manager
+   */
+  public ContainerSupervisor(Configuration conf, NodeManager nodeManager,
+                             NodePoolManager poolManager) {
+    Preconditions.checkNotNull(poolManager);
+    Preconditions.checkNotNull(nodeManager);
+    this.containerProcessingLag =
+        conf.getTimeDuration(OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL,
+            OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL_DEFAULT,
+            TimeUnit.SECONDS
+        ) * 1000;
+    int maxContainerReportThreads =
+        conf.getInt(OZONE_SCM_MAX_CONTAINER_REPORT_THREADS,
+            OZONE_SCM_MAX_CONTAINER_REPORT_THREADS_DEFAULT
+        );
+    this.maxPoolWait =
+        conf.getTimeDuration(OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT,
+            OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT_DEFAULT,
+            TimeUnit.MILLISECONDS);
+    this.inProgressPoolMaxCount = conf.getInt(
+        OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS,
+        OZONE_SCM_MAX_NODEPOOL_PROCESSING_THREADS_DEFAULT);
+    this.poolManager = poolManager;
+    this.nodeManager = nodeManager;
+    this.poolNames = new HashSet<>();
+    this.poolQueue = new PriorityQueue<>();
+    this.runnable = new AtomicBoolean(true);
+    this.threadFaultCount = new AtomicInteger(0);
+    this.executorService = newCachedThreadPool(
+        new ThreadFactoryBuilder().setDaemon(true)
+            .setNameFormat("Container Reports Processing Thread - %d")
+            .build(), maxContainerReportThreads);
+    this.inProgressPoolList = new LinkedList<>();
+    this.inProgressPoolListLock = new ReentrantReadWriteLock();
+
+    initPoolProcessThread();
+  }
+
+  private ExecutorService newCachedThreadPool(ThreadFactory threadFactory,
+      int maxThreads) {
+    return new HadoopThreadPoolExecutor(0, maxThreads, 60L, TimeUnit.SECONDS,
+        new LinkedBlockingQueue<>(), threadFactory);
+  }
+
+  /**
+   * Returns the number of pools that are being processed right now.
+   * @return int - Number of pools that are in process.
+   */
+  public int getInProgressPoolCount() {
+    return inProgressPoolList.size();
+  }
+
+  /**
+   * Exits the background thread.
+   */
+  public void setExit() {
+    this.runnable.set(false);
+  }
+
+  /**
+   * Adds or removes pools from the set of pool names that we need to process.
+   *
+   * There are two different cases that we need to handle: the case where
+   * new pools are added, and the case where existing pools are removed.
+   */
+  private void refreshPools() {
+    List<String> pools = this.poolManager.getNodePools();
+    if (pools != null) {
+
+      HashSet<String> removedPools =
+          computePoolDifference(this.poolNames, new HashSet<>(pools));
+
+      HashSet<String> addedPools =
+          computePoolDifference(new HashSet<>(pools), this.poolNames);
+      // TODO: Support remove pool API in pool manager so that this code
+      // path can be tested. This never happens in the current code base.
+      for (String poolName : removedPools) {
+        for (PeriodicPool periodicPool : poolQueue) {
+          if (periodicPool.getPoolName().compareTo(poolName) == 0) {
+            poolQueue.remove(periodicPool);
+          }
+        }
+      }
+      // Remove the pool names that we have in the list.
+      this.poolNames.removeAll(removedPools);
+
+      for (String poolName : addedPools) {
+        poolQueue.add(new PeriodicPool(poolName));
+      }
+
+      // Add to the pool names we are tracking.
+      poolNames.addAll(addedPools);
+    }
+
+  }
+
+  /**
+   * Computes the set difference, that is, the pools that are present in
+   * newPools but not in oldPool.
+   *
+   * @param newPools - New pool set.
+   * @param oldPool - Old pool set.
+   * @return Pools that exist only in newPools.
+   */
+  private HashSet<String> computePoolDifference(HashSet<String> newPools,
+      Set<String> oldPool) {
+    Preconditions.checkNotNull(newPools);
+    Preconditions.checkNotNull(oldPool);
+    HashSet<String> newSet = new HashSet<>(newPools);
+    newSet.removeAll(oldPool);
+    return newSet;
+  }
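+
+  /*
+   * Illustrative sketch of how refreshPools uses the difference computation
+   * above; the pool names here are hypothetical example values.
+   *
+   *   Set<String> known = new HashSet<>(Arrays.asList("pool1", "pool2"));
+   *   Set<String> latest = new HashSet<>(Arrays.asList("pool2", "pool3"));
+   *   HashSet<String> removed =
+   *       computePoolDifference(new HashSet<>(known), latest);   // {pool1}
+   *   HashSet<String> added =
+   *       computePoolDifference(new HashSet<>(latest), known);   // {pool3}
+   */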
+
+  private void initPoolProcessThread() {
+
+    /*
+     * Task that runs to check if we need to start a pool processing job.
+     * If so, we create a pool reconciliation job and find out if all the
+     * expected containers are on the nodes.
+     */
+    Runnable processPools = () -> {
+      while (runnable.get()) {
+        // Make sure that we don't have any new pools.
+        refreshPools();
+        while (inProgressPoolList.size() < inProgressPoolMaxCount) {
+          PeriodicPool pool = poolQueue.poll();
+          if (pool != null) {
+            if (pool.getLastProcessedTime() + this.containerProcessingLag >
+                Time.monotonicNow()) {
+              LOG.debug("Not within the time window for processing: {}",
+                  pool.getPoolName());
+              // We might oversleep here; not a big deal.
+              sleepUninterruptibly(this.containerProcessingLag,
+                  TimeUnit.MILLISECONDS);
+            }
+            LOG.debug("Adding pool {} to container processing queue",
+                pool.getPoolName());
+            InProgressPool inProgressPool = new InProgressPool(maxPoolWait,
+                pool, this.nodeManager, this.poolManager, this.executorService);
+            inProgressPool.startReconciliation();
+            inProgressPoolListLock.writeLock().lock();
+            try {
+              inProgressPoolList.add(inProgressPool);
+            } finally {
+              inProgressPoolListLock.writeLock().unlock();
+            }
+            poolProcessCount++;
+          } else {
+            break;
+          }
+        }
+        sleepUninterruptibly(this.maxPoolWait, TimeUnit.MILLISECONDS);
+        inProgressPoolListLock.readLock().lock();
+        try {
+          for (InProgressPool inProgressPool : inProgressPoolList) {
+            inProgressPool.finalizeReconciliation();
+            poolQueue.add(inProgressPool.getPool());
+          }
+        } finally {
+          inProgressPoolListLock.readLock().unlock();
+        }
+        inProgressPoolListLock.writeLock().lock();
+        try {
+          inProgressPoolList.clear();
+        } finally {
+          inProgressPoolListLock.writeLock().unlock();
+        }
+      }
+    };
+
+    // We will have only one thread for pool processing.
+    Thread poolProcessThread = new Thread(processPools);
+    poolProcessThread.setDaemon(true);
+    poolProcessThread.setName("Pool replica thread");
+    poolProcessThread.setUncaughtExceptionHandler((Thread t, Throwable e) -> {
+      // Let us just restart this thread after logging a critical error.
+      // If this thread is not running, we cannot handle commands from SCM.
+      LOG.error("Critical Error : Pool replica thread encountered an " +
+          "error. Thread: {} Error Count : {}", t.toString(),
+          threadFaultCount.incrementAndGet(), e);
+      // A terminated thread cannot be started again, so create a new one.
+      initPoolProcessThread();
+      // TODO : Add a config to restrict how many times we will restart this
+      // thread in a single session.
+    });
+    poolProcessThread.start();
+  }
+
+  /**
+   * Routes a container report to the appropriate in-progress pool.
+   * @param containerReport - Container report from a datanode.
+   */
+  public void handleContainerReport(
+      ContainerReportsRequestProto containerReport) {
+    DatanodeDetails datanodeDetails = DatanodeDetails.getFromProtoBuf(
+        containerReport.getDatanodeDetails());
+    inProgressPoolListLock.readLock().lock();
+    try {
+      String poolName = poolManager.getNodePool(datanodeDetails);
+      for (InProgressPool ppool : inProgressPoolList) {
+        if (ppool.getPoolName().equalsIgnoreCase(poolName)) {
+          ppool.handleContainerReport(containerReport);
+          return;
+        }
+      }
+      // TODO: Decide if we can do anything else with this report.
+      LOG.debug("Discarding the container report for pool {}. " +
+              "That pool is not currently in the pool reconciliation process." +
+              " Datanode: {}", poolName,
+          containerReport.getDatanodeDetails());
+    } catch (SCMException e) {
+      LOG.warn("Skipping processing container report from datanode {}, "
+              + "cause: failed to get the corresponding node pool",
+          datanodeDetails.toString(), e);
+    } finally {
+      inProgressPoolListLock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Gets the in-progress pool list; used for testing.
+   * @return List of InProgressPool
+   */
+  @VisibleForTesting
+  public List<InProgressPool> getInProcessPoolList() {
+    return inProgressPoolList;
+  }
+
+  /**
+   * Shutdown the Container Replication Manager.
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    setExit();
+    HadoopExecutors.shutdown(executorService, LOG, 5, TimeUnit.SECONDS);
+  }
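+
+  /*
+   * Typical lifecycle, sketched for illustration; conf and the two managers
+   * are assumed to be supplied by the SCM instance that owns this object.
+   *
+   *   ContainerSupervisor supervisor =
+   *       new ContainerSupervisor(conf, nodeManager, poolManager);
+   *   supervisor.handleContainerReport(reportProto);  // as reports arrive
+   *   supervisor.close();  // stops the pool processing loop and the executor
+   */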
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java
new file mode 100644
index 0000000..ddbd213
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/InProgressPool.java
@@ -0,0 +1,313 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.replication;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import static com.google.common.util.concurrent.Uninterruptibles
+    .sleepUninterruptibly;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .INVALID;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+
+/**
+ * A pool that is actively checking the replication status of its containers.
+ */
+public final class InProgressPool {
+  public static final Logger LOG =
+      LoggerFactory.getLogger(InProgressPool.class);
+
+  private final PeriodicPool pool;
+  private final NodeManager nodeManager;
+  private final NodePoolManager poolManager;
+  private final ExecutorService executorService;
+  private final Map<String, Integer> containerCountMap;
+  private final Map<UUID, Boolean> processedNodeSet;
+  private final long startTime;
+  private ProgressStatus status;
+  private AtomicInteger nodeCount;
+  private AtomicInteger nodeProcessed;
+  private AtomicInteger containerProcessedCount;
+  private long maxWaitTime;
+  /**
+   * Constructs a pool that is being processed.
+   *
+   * @param maxWaitTime - Maximum wait time in milliseconds.
+   * @param pool - Pool that we are working against.
+   * @param nodeManager - Node Manager.
+   * @param poolManager - Pool Manager.
+   * @param executorService - Shared executor service.
+   */
+  InProgressPool(long maxWaitTime, PeriodicPool pool,
+      NodeManager nodeManager, NodePoolManager poolManager,
+                 ExecutorService executorService) {
+    Preconditions.checkNotNull(pool);
+    Preconditions.checkNotNull(nodeManager);
+    Preconditions.checkNotNull(poolManager);
+    Preconditions.checkNotNull(executorService);
+    Preconditions.checkArgument(maxWaitTime > 0);
+    this.pool = pool;
+    this.nodeManager = nodeManager;
+    this.poolManager = poolManager;
+    this.executorService = executorService;
+    this.containerCountMap = new ConcurrentHashMap<>();
+    this.processedNodeSet = new ConcurrentHashMap<>();
+    this.maxWaitTime = maxWaitTime;
+    startTime = Time.monotonicNow();
+  }
+
+  /**
+   * Returns periodic pool.
+   *
+   * @return PeriodicPool
+   */
+  public PeriodicPool getPool() {
+    return pool;
+  }
+
+  /**
+   * We are done if we have received reports from all nodes or we have
+   * waited longer than the specified wait time.
+   *
+   * @return true if we are done, false otherwise.
+   */
+  public boolean isDone() {
+    return (nodeCount.get() == nodeProcessed.get()) ||
+        (this.startTime + this.maxWaitTime) < Time.monotonicNow();
+  }
+
+  /**
+   * Gets the number of containers processed.
+   *
+   * @return int
+   */
+  public int getContainerProcessedCount() {
+    return containerProcessedCount.get();
+  }
+
+  /**
+   * Returns the start time in milliseconds.
+   *
+   * @return - Start Time.
+   */
+  public long getStartTime() {
+    return startTime;
+  }
+
+  /**
+   * Get the number of nodes in this pool.
+   *
+   * @return - node count
+   */
+  public int getNodeCount() {
+    return nodeCount.get();
+  }
+
+  /**
+   * Get the number of nodes that we have already processed container reports
+   * from.
+   *
+   * @return - Processed count.
+   */
+  public int getNodeProcessed() {
+    return nodeProcessed.get();
+  }
+
+  /**
+   * Returns the current status.
+   *
+   * @return Status
+   */
+  public ProgressStatus getStatus() {
+    return status;
+  }
+
+  /**
+   * Starts the reconciliation process for all the nodes in the pool.
+   */
+  public void startReconciliation() {
+    List<DatanodeDetails> datanodeDetailsList =
+        this.poolManager.getNodes(pool.getPoolName());
+    if (datanodeDetailsList.isEmpty()) {
+      LOG.error("Datanode list for {} is empty. Pool with no nodes?",
+          pool.getPoolName());
+      this.status = ProgressStatus.Error;
+      return;
+    }
+
+    nodeProcessed = new AtomicInteger(0);
+    containerProcessedCount = new AtomicInteger(0);
+    nodeCount = new AtomicInteger(0);
+    /*
+     * Ask each datanode in the pool to send us its container report.
+     */
+    SendContainerCommand cmd = SendContainerCommand.newBuilder().build();
+    for (DatanodeDetails dd : datanodeDetailsList) {
+      NodeState currentState = getNodestate(dd);
+      if (currentState == HEALTHY || currentState == STALE) {
+        nodeCount.incrementAndGet();
+        // Queue commands to all datanodes in this pool to send us their
+        // container reports. Since we ignore dead nodes, it is possible that
+        // we would have over-replicated a container if such a node comes back.
+        nodeManager.addDatanodeCommand(dd.getUuid(), cmd);
+      }
+    }
+    this.status = ProgressStatus.InProgress;
+    this.getPool().setLastProcessedTime(Time.monotonicNow());
+  }
+
+  /**
+   * Gets the node state.
+   *
+   * @param datanode - datanode information.
+   * @return NodeState.
+   */
+  private NodeState getNodestate(DatanodeDetails datanode) {
+    NodeState currentState = INVALID;
+    int maxTry = 100;
+    // We need to loop so that we retry if the node state comes back unknown.
+    // This could become an infinite loop if we are given an unknown node ID,
+    // so a max try count is used to prevent that.
+
+    int currentTry = 0;
+    while (currentState == INVALID && currentTry < maxTry) {
+      // Retry to make sure that we deal with the case of node state not
+      // known.
+      currentState = nodeManager.getNodeState(datanode);
+      currentTry++;
+      if (currentState == INVALID) {
+        // Sleep to make sure that this is not a tight loop.
+        sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
+      }
+    }
+    if (currentState == INVALID) {
+      LOG.error("Not able to determine the state of Node: {}. Exceeded max " +
+          "tries and node manager still returns INVALID state. This " +
+          "indicates we are dealing with a node that we don't know about.",
+          datanode);
+    }
+    return currentState;
+  }
+
+  /**
+   * Queues a container report for handling. This is done in a worker thread
+   * since decoding a container report might be compute intensive. We don't
+   * want to block since we have asked for a bunch of container reports
+   * from a set of datanodes.
+   *
+   * @param containerReport - ContainerReport
+   */
+  public void handleContainerReport(
+      ContainerReportsRequestProto containerReport) {
+    if (status == ProgressStatus.InProgress) {
+      executorService.submit(processContainerReport(containerReport));
+    } else {
+      LOG.debug("Cannot handle container report when the pool is in {} status.",
+          status);
+    }
+  }
+
+  private Runnable processContainerReport(
+      ContainerReportsRequestProto reports) {
+    return () -> {
+      DatanodeDetails datanodeDetails =
+          DatanodeDetails.getFromProtoBuf(reports.getDatanodeDetails());
+      // Count each datanode only once; putIfAbsent returns null the first
+      // time we record this UUID.
+      if (processedNodeSet.putIfAbsent(datanodeDetails.getUuid(),
+          Boolean.TRUE) == null) {
+        nodeProcessed.incrementAndGet();
+        LOG.debug("Total Nodes processed : {} Node Name: {} ", nodeProcessed,
+            datanodeDetails.getUuid());
+        for (ContainerInfo info : reports.getReportsList()) {
+          containerProcessedCount.incrementAndGet();
+          LOG.debug("Total Containers processed: {} Container Name: {}",
+              containerProcessedCount.get(), info.getContainerName());
+
+          // Update the container map with count + 1 if the key exists or
+          // update the map with 1. Since this is a concurrentMap the
+          // computation and update is atomic.
+          containerCountMap.merge(info.getContainerName(), 1, Integer::sum);
+        }
+      }
+    };
+  }
+
+  /**
+   * Filter the containers based on specific rules.
+   *
+   * @param predicate -- Predicate to filter by
+   * @return A list of map entries.
+   */
+  public List<Map.Entry<String, Integer>> filterContainer(
+      Predicate<Map.Entry<String, Integer>> predicate) {
+    return containerCountMap.entrySet().stream()
+        .filter(predicate).collect(Collectors.toList());
+  }
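+
+  /*
+   * Illustrative use of filterContainer; the expected replica count of 3 is
+   * an assumption made for this sketch, not a value defined by this class.
+   *
+   *   List<Map.Entry<String, Integer>> underReplicated =
+   *       inProgressPool.filterContainer(entry -> entry.getValue() < 3);
+   */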
+
+  /**
+   * Used only for testing; calling this will abort container report
+   * processing. This is a very dangerous call and should not be made by any
+   * users.
+   */
+  @VisibleForTesting
+  public void setDoneProcessing() {
+    nodeProcessed.set(nodeCount.get());
+  }
+
+  /**
+   * Returns the pool name.
+   *
+   * @return Name of the pool.
+   */
+  String getPoolName() {
+    return pool.getPoolName();
+  }
+
+  public void finalizeReconciliation() {
+    status = ProgressStatus.Done;
+    //TODO: Add finalizing logic. This is where actual reconciliation happens.
+  }
+
+  /**
+   * Current status of the replication computation.
+   */
+  public enum ProgressStatus {
+    InProgress, Done, Error
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java
new file mode 100644
index 0000000..ef28aa7
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/PeriodicPool.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.replication;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Periodic pool is a pool with a timestamp. This allows us to process pools
+ * based on a cyclic clock.
+ */
+public class PeriodicPool implements Comparable<PeriodicPool> {
+  private final String poolName;
+  private long lastProcessedTime;
+  private AtomicLong totalProcessedCount;
+
+  /**
+   * Constructs a periodic pool.
+   *
+   * @param poolName - Name of the pool
+   */
+  public PeriodicPool(String poolName) {
+    this.poolName = poolName;
+    lastProcessedTime = 0;
+    totalProcessedCount = new AtomicLong(0);
+  }
+
+  /**
+   * Get pool Name.
+   * @return PoolName
+   */
+  public String getPoolName() {
+    return poolName;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   * @return a negative integer, zero, or a positive integer as this object is
+   * less than, equal to, or greater than the specified object.
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException   if the specified object's type prevents it
+   *                              from being compared to this object.
+   */
+  @Override
+  public int compareTo(PeriodicPool o) {
+    return Long.compare(this.lastProcessedTime, o.lastProcessedTime);
+  }
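+
+  /*
+   * Ordering sketch, for illustration only: when PeriodicPool instances are
+   * placed in a PriorityQueue, the least recently processed pool surfaces
+   * first.
+   *
+   *   PriorityQueue<PeriodicPool> queue = new PriorityQueue<>();
+   *   PeriodicPool a = new PeriodicPool("poolA");   // lastProcessedTime = 0
+   *   PeriodicPool b = new PeriodicPool("poolB");
+   *   b.setLastProcessedTime(100L);
+   *   queue.add(b);
+   *   queue.add(a);
+   *   queue.poll();   // returns a
+   */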
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    PeriodicPool that = (PeriodicPool) o;
+
+    return poolName.equals(that.poolName);
+  }
+
+  @Override
+  public int hashCode() {
+    return poolName.hashCode();
+  }
+
+  /**
+   * Returns the Total Times we have processed this pool.
+   *
+   * @return processed count.
+   */
+  public long getTotalProcessedCount() {
+    return totalProcessedCount.get();
+  }
+
+  /**
+   * Gets the last time we processed this pool.
+   * @return time in milliseconds
+   */
+  public long getLastProcessedTime() {
+    return this.lastProcessedTime;
+  }
+
+  /**
+   * Sets the last processed time.
+   *
+   * @param lastProcessedTime - Long in milliseconds.
+   */
+  public void setLastProcessedTime(long lastProcessedTime) {
+    this.lastProcessedTime = lastProcessedTime;
+  }
+
+  /**
+   * Increments the total processed count.
+   */
+  public void incTotalProcessedCount() {
+    this.totalProcessedCount.incrementAndGet();
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java
new file mode 100644
index 0000000..7bbe2ef
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container.replication;
+/*
+ * This package contains routines that manage replication of a container. This
+ * relies on container reports to understand the replication level of a
+ * container -- UnderReplicated, Replicated, OverReplicated -- and manages the
+ * replication level based on that.
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java
new file mode 100644
index 0000000..288fa2d
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerAttribute.java
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+package org.apache.hadoop.hdds.scm.container.states;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeSet;
+
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_CHANGE_CONTAINER_STATE;
+
+/**
+ * Each Attribute that we manage for a container is maintained as a map.
+ * <p>
+ * Currently we manage the following attributes for a container.
+ * <p>
+ * 1. StateMap - LifeCycleState -> Set of ContainerIDs
+ * 2. TypeMap  - ReplicationType -> Set of ContainerIDs
+ * 3. OwnerMap - OwnerNames -> Set of ContainerIDs
+ * 4. FactorMap - ReplicationFactor -> Set of ContainerIDs
+ * <p>
+ * This means that for a cluster size of 750 PB we will have around 150
+ * million containers, if we assume a 5 GB average container size.
+ * <p>
+ * That implies that these maps will take around 2/3 GB of RAM which will be
+ * pinned down in the SCM. This is deemed acceptable since we can tune the
+ * container size -- say we make it 10 GB average size, then we can deal with a
+ * cluster size of 1.5 exabytes with the same metadata in SCM's memory.
+ * <p>
+ * Please note: **This class is not thread safe**. It used to be thread safe,
+ * but while benchmarking we found that ContainerStateMap would take 5
+ * locks for a single container insert. If we remove locks in this class,
+ * we are able to perform about 540K operations per second; with the
+ * locks in this class it goes down to 246K operations per second. Hence we
+ * are going to rely on ContainerStateMap locks to maintain consistency of
+ * data in these classes too, since ContainerAttribute is only used by the
+ * ContainerStateMap class.
+ */
+public class ContainerAttribute<T> {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerAttribute.class);
+
+  private final Map<T, NavigableSet<ContainerID>> attributeMap;
+  private static final NavigableSet<ContainerID> EMPTY_SET =  Collections
+      .unmodifiableNavigableSet(new TreeSet<>());
+
+  /**
+   * Creates a Container Attribute map from an existing Map.
+   *
+   * @param attributeMap - AttributeMap
+   */
+  public ContainerAttribute(Map<T, NavigableSet<ContainerID>> attributeMap) {
+    this.attributeMap = attributeMap;
+  }
+
+  /**
+   * Create an empty Container Attribute map.
+   */
+  public ContainerAttribute() {
+    this.attributeMap = new HashMap<>();
+  }
+
+  /**
+   * Insert or update the value in the Attribute map.
+   *
+   * @param key - The key to the set where the ContainerID should exist.
+   * @param value - Actual Container ID.
+   * @return true on successful insert or update.
+   * @throws SCMException - on Error
+   */
+  public boolean insert(T key, ContainerID value) throws SCMException {
+    Preconditions.checkNotNull(key);
+    Preconditions.checkNotNull(value);
+
+    if (attributeMap.containsKey(key)) {
+      if (attributeMap.get(key).add(value)) {
+        return true; // we inserted the value as it doesn't exist in the set.
+      } else { // Failure indicates that this ContainerID exists in the set.
+        if (!attributeMap.get(key).remove(value)) {
+          LOG.error("Failure to remove the object from the Map. Key: {}, " +
+              "ContainerID: {}", key, value);
+          throw new SCMException("Failure to remove the object from the Map",
+              FAILED_TO_CHANGE_CONTAINER_STATE);
+        }
+        attributeMap.get(key).add(value);
+        return true;
+      }
+    } else {
+      // This key does not exist, we need to allocate this key in the map.
+      // TODO: Replace TreeSet with FoldedTreeSet from HDFS Utils.
+      // Skipping for now, since FoldedTreeSet does not have implementations
+      // for headSet and TailSet. We need those calls.
+      this.attributeMap.put(key, new TreeSet<>());
+      // This should not fail, we just allocated this object.
+      attributeMap.get(key).add(value);
+      return true;
+    }
+  }
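+
+  /*
+   * Illustrative usage; LifeCycleState and the container id below are example
+   * values chosen for the sketch.
+   *
+   *   ContainerAttribute<LifeCycleState> stateMap = new ContainerAttribute<>();
+   *   stateMap.insert(LifeCycleState.OPEN, ContainerID.valueof(42));
+   *   stateMap.hasContainerID(LifeCycleState.OPEN, 42);   // true
+   *   stateMap.getCollection(LifeCycleState.OPEN);        // contains 42
+   */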
+
+  /**
+   * Returns true if we have this bucket in the attribute map.
+   *
+   * @param key - Key to look up.
+   * @return true if we have the key.
+   */
+  public boolean hasKey(T key) {
+    Preconditions.checkNotNull(key);
+    return this.attributeMap.containsKey(key);
+  }
+
+  /**
+   * Returns true if we have the key and the containerID in the bucket.
+   *
+   * @param key - Key to the bucket
+   * @param id - container ID that we want to lookup
+   * @return true or false
+   */
+  public boolean hasContainerID(T key, ContainerID id) {
+    Preconditions.checkNotNull(key);
+    Preconditions.checkNotNull(id);
+
+    return this.attributeMap.containsKey(key) &&
+        this.attributeMap.get(key).contains(id);
+  }
+
+  /**
+   * Returns true if we have the key and the containerID in the bucket.
+   *
+   * @param key - Key to the bucket
+   * @param id - container ID that we want to lookup
+   * @return true or false
+   */
+  public boolean hasContainerID(T key, int id) {
+    return hasContainerID(key, ContainerID.valueof(id));
+  }
+
+  /**
+   * Clears all entries for this key type.
+   *
+   * @param key - Key that identifies the Set.
+   */
+  public void clearSet(T key) {
+    Preconditions.checkNotNull(key);
+
+    if (attributeMap.containsKey(key)) {
+      attributeMap.get(key).clear();
+    } else {
+      LOG.debug("key: {} does not exist in the attributeMap", key);
+    }
+  }
+
+  /**
+   * Removes a container ID from the set pointed to by the key.
+   *
+   * @param key - key to identify the set.
+   * @param value - Container ID
+   * @return true if the container ID was removed, false otherwise.
+   */
+  public boolean remove(T key, ContainerID value) {
+    Preconditions.checkNotNull(key);
+    Preconditions.checkNotNull(value);
+
+    if (attributeMap.containsKey(key)) {
+      if (!attributeMap.get(key).remove(value)) {
+        LOG.debug("ContainerID: {} does not exist in the set pointed by " +
+            "key:{}", value, key);
+        return false;
+      }
+      return true;
+    } else {
+      LOG.debug("key: {} does not exist in the attributeMap", key);
+      return false;
+    }
+  }
+
+  /**
+   * Returns the collection that maps to the given key.
+   *
+   * @param key - Key to the bucket.
+   * @return Underlying Set in immutable form.
+   */
+  public NavigableSet<ContainerID> getCollection(T key) {
+    Preconditions.checkNotNull(key);
+
+    if (this.attributeMap.containsKey(key)) {
+      return Collections.unmodifiableNavigableSet(this.attributeMap.get(key));
+    }
+    LOG.debug("No such Key. Key {}", key);
+    return EMPTY_SET;
+  }
+
+  /**
+   * Moves a ContainerID from one bucket to another.
+   *
+   * @param currentKey - Current Key
+   * @param newKey - newKey
+   * @param value - ContainerID
+   * @throws SCMException on Error
+   */
+  public void update(T currentKey, T newKey, ContainerID value)
+      throws SCMException {
+    Preconditions.checkNotNull(currentKey);
+    Preconditions.checkNotNull(newKey);
+
+    boolean removed = false;
+    try {
+      removed = remove(currentKey, value);
+      if (!removed) {
+        throw new SCMException("Unable to find key in the current key bucket",
+            FAILED_TO_CHANGE_CONTAINER_STATE);
+      }
+      insert(newKey, value);
+    } catch (SCMException ex) {
+      // if we removed the key, insert it back to original bucket, since the
+      // next insert failed.
+      LOG.error("error in update.", ex);
+      if (removed) {
+        insert(currentKey, value);
+        LOG.trace("reinserted the removed key. {}", currentKey);
+      }
+      throw ex;
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java
new file mode 100644
index 0000000..1dac36e
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerState.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container.states;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+/**
+ * Class that acts as the container state.
+ */
+public class ContainerState {
+  private final HddsProtos.ReplicationType type;
+  private final String owner;
+  private final HddsProtos.ReplicationFactor replicationFactor;
+
+  /**
+   * Constructs a Container Key.
+   *
+   * @param owner - Container Owners
+   * @param type - Replication Type.
+   * @param factor - Replication Factors
+   */
+  public ContainerState(String owner, HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor) {
+    this.type = type;
+    this.owner = owner;
+    this.replicationFactor = factor;
+  }
+
+
+  public HddsProtos.ReplicationType getType() {
+    return type;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public HddsProtos.ReplicationFactor getFactor() {
+    return replicationFactor;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    ContainerState that = (ContainerState) o;
+
+    return new EqualsBuilder()
+        .append(type, that.type)
+        .append(owner, that.owner)
+        .append(replicationFactor, that.replicationFactor)
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(137, 757)
+        .append(type)
+        .append(owner)
+        .append(replicationFactor)
+        .toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "ContainerKey{" +
+        ", type=" + type +
+        ", owner=" + owner +
+        ", replicationFactor=" + replicationFactor +
+        '}';
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
new file mode 100644
index 0000000..48c6423
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
@@ -0,0 +1,405 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container.states;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.util.AutoCloseableLock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeSet;
+
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .CONTAINER_EXISTS;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_CHANGE_CONTAINER_STATE;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_FIND_CONTAINER;
+
+/**
+ * Container State Map acts like a unified map of various attributes that are
+ * used to select containers when we need to allocate blocks.
+ * <p>
+ * This class provides the ability to query 4 classes of attributes. They are
+ * <p>
+ * 1. LifeCycleStates - LifeCycle states of a container describe which state
+ * a container is in. For example, a container needs to be in the Open state
+ * for a client to be able to write to it.
+ * <p>
+ * 2. Owners - Each instance of a Name service, for example, the Namenode of
+ * HDFS or the Key Space Manager (KSM) of Ozone or CBlockServer, is an owner.
+ * It is possible to have many KSMs for an Ozone cluster and only one SCM. But
+ * SCM keeps the data from each KSM in a separate bucket, never mixing them.
+ * To write data, often we have to find all open containers for a specific
+ * owner.
+ * <p>
+ * 3. ReplicationType - The clients are allowed to specify what kind of
+ * replication pipeline they want to use. Each container exists on top of a
+ * pipeline, so we need to get the ReplicationType that is specified by the
+ * user.
+ * <p>
+ * 4. ReplicationFactor - The replication factor represents how many copies
+ * of data should be made. Right now we support two different factors, ONE
+ * replica and THREE replicas. The user can specify how many copies should be
+ * made for an Ozone key.
+ * <p>
+ * The most common access pattern of this class is to select a container based
+ * on all these parameters, for example, when allocating a block we will
+ * select a container that belongs to user1, with Ratis replication, which can
+ * make 3 copies of data. We look for open containers by default, and if we
+ * cannot find them we add new containers.
+ */
+public class ContainerStateMap {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerStateMap.class);
+
+  private final ContainerAttribute<LifeCycleState> lifeCycleStateMap;
+  private final ContainerAttribute<String> ownerMap;
+  private final ContainerAttribute<ReplicationFactor> factorMap;
+  private final ContainerAttribute<ReplicationType> typeMap;
+
+  private final Map<ContainerID, ContainerInfo> containerMap;
+  private final static NavigableSet<ContainerID> EMPTY_SET  =
+      Collections.unmodifiableNavigableSet(new TreeSet<>());
+
+  // The Container State Map lock should be held before updating the
+  // ContainerAttributes. The consistency of the ContainerAttributes is
+  // protected by this lock.
+  private final AutoCloseableLock autoLock;
+
+  /**
+   * Create a ContainerStateMap.
+   */
+  public ContainerStateMap() {
+    lifeCycleStateMap = new ContainerAttribute<>();
+    ownerMap = new ContainerAttribute<>();
+    factorMap = new ContainerAttribute<>();
+    typeMap = new ContainerAttribute<>();
+    containerMap = new HashMap<>();
+    autoLock = new AutoCloseableLock();
+//        new InstrumentedLock(getClass().getName(), LOG,
+//            new ReentrantLock(),
+//            1000,
+//            300));
+  }
+
+  /**
+   * Adds a ContainerInfo Entry in the ContainerStateMap.
+   *
+   * @param info - container info
+   * @throws SCMException - throws if create failed.
+   */
+  public void addContainer(ContainerInfo info)
+      throws SCMException {
+    Preconditions.checkNotNull(info, "Container Info cannot be null");
+    Preconditions.checkNotNull(info.getPipeline(), "Pipeline cannot be null");
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      ContainerID id = ContainerID.valueof(info.getContainerID());
+      if (containerMap.putIfAbsent(id, info) != null) {
+        LOG.debug("Duplicate container ID detected. {}", id);
+        throw new
+            SCMException("Duplicate container ID detected.",
+            CONTAINER_EXISTS);
+      }
+
+      lifeCycleStateMap.insert(info.getState(), id);
+      ownerMap.insert(info.getOwner(), id);
+      factorMap.insert(info.getPipeline().getFactor(), id);
+      typeMap.insert(info.getPipeline().getType(), id);
+      LOG.trace("Created container with {} successfully.", id);
+    }
+  }
+
+  /**
+   * Returns the latest state of Container from SCM's Container State Map.
+   *
+   * @param info - ContainerInfo
+   * @return ContainerInfo
+   */
+  public ContainerInfo getContainerInfo(ContainerInfo info) {
+    return getContainerInfo(info.getContainerID());
+  }
+
+  /**
+   * Returns the latest state of Container from SCM's Container State Map.
+   *
+   * @param containerID - long
+   * @return container info, if found.
+   */
+  public ContainerInfo getContainerInfo(long containerID) {
+    ContainerID id = new ContainerID(containerID);
+    return containerMap.get(id);
+  }
+
+  /**
+   * Returns the full container Map.
+   *
+   * @return - Map
+   */
+  public Map<ContainerID, ContainerInfo> getContainerMap() {
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      return Collections.unmodifiableMap(containerMap);
+    }
+  }
+
+  /**
+   * Updates the stored info for an existing container.
+   * @param info ContainerInfo.
+   * @throws SCMException if the container does not exist.
+   */
+  public void updateContainerInfo(ContainerInfo info) throws SCMException {
+    Preconditions.checkNotNull(info);
+    ContainerInfo currentInfo = null;
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      currentInfo = containerMap.get(
+          ContainerID.valueof(info.getContainerID()));
+
+      if (currentInfo == null) {
+        throw new SCMException("No such container.", FAILED_TO_FIND_CONTAINER);
+      }
+      containerMap.put(info.containerID(), info);
+    }
+  }
+
+  /**
+   * Update the State of a container.
+   *
+   * @param info - ContainerInfo
+   * @param currentState - CurrentState
+   * @param newState - NewState.
+   * @throws SCMException - in case of failure.
+   */
+  public void updateState(ContainerInfo info, LifeCycleState currentState,
+      LifeCycleState newState) throws SCMException {
+    Preconditions.checkNotNull(currentState);
+    Preconditions.checkNotNull(newState);
+
+    ContainerID id = new ContainerID(info.getContainerID());
+    ContainerInfo currentInfo = null;
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      currentInfo = containerMap.get(id);
+
+      if (currentInfo == null) {
+        throw new
+            SCMException("No such container.", FAILED_TO_FIND_CONTAINER);
+      }
+      // We are updating two places before this update is done; these can
+      // fail independently, so the code needs to handle both cases.
+
+      // We update the attribute map; if that fails it will throw an
+      // exception, so no issues. If we are successful, we keep track of the
+      // fact that we have updated the lifecycle state in the map, and update
+      // the container state. If this second update fails, we will attempt to
+      // roll back the earlier change we made. If the rollback fails, we can
+      // be left in an inconsistent state.
+
+      info.setState(newState);
+      containerMap.put(id, info);
+      lifeCycleStateMap.update(currentState, newState, id);
+      LOG.trace("Updated the container {} to new state. Old = {}, new = " +
+          "{}", id, currentState, newState);
+    } catch (SCMException ex) {
+      LOG.error("Unable to update the container state.", ex);
+      // we need to revert the change in this attribute since we are not
+      // able to update the hash table.
+      LOG.info("Reverting the update to lifecycle state. Moving back to " +
+              "old state. Old = {}, Attempted state = {}", currentState,
+          newState);
+
+      containerMap.put(id, currentInfo);
+
+      // If this line throws, the state map can be left in an inconsistent
+      // state, since we will have modified the attribute map but the
+      // container state will not be in sync, because we were not able to put
+      // it into the hash table.
+      lifeCycleStateMap.update(newState, currentState, id);
+
+      throw new SCMException("Updating the container map failed.", ex,
+          FAILED_TO_CHANGE_CONTAINER_STATE);
+    }
+  }
+
+  /**
+   * Returns a list of containers owned by a name service.
+   *
+   * @param ownerName - Name of the NameService.
+   * @return - NavigableSet of ContainerIDs.
+   */
+  NavigableSet<ContainerID> getContainerIDsByOwner(String ownerName) {
+    Preconditions.checkNotNull(ownerName);
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      return ownerMap.getCollection(ownerName);
+    }
+  }
+
+  /**
+   * Returns Containers in the System by the Type.
+   *
+   * @param type - Replication type -- StandAlone, Ratis etc.
+   * @return NavigableSet
+   */
+  NavigableSet<ContainerID> getContainerIDsByType(ReplicationType type) {
+    Preconditions.checkNotNull(type);
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      return typeMap.getCollection(type);
+    }
+  }
+
+  /**
+   * Returns Containers by replication factor.
+   *
+   * @param factor - Replication Factor.
+   * @return NavigableSet.
+   */
+  NavigableSet<ContainerID> getContainerIDsByFactor(ReplicationFactor factor) {
+    Preconditions.checkNotNull(factor);
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      return factorMap.getCollection(factor);
+    }
+  }
+
+  /**
+   * Returns Containers by State.
+   *
+   * @param state - State - Open, Closed etc.
+   * @return List of containers by state.
+   */
+  NavigableSet<ContainerID> getContainerIDsByState(LifeCycleState state) {
+    Preconditions.checkNotNull(state);
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+      return lifeCycleStateMap.getCollection(state);
+    }
+  }
+
+  /**
+   * Gets the containers that match all of the following filters.
+   *
+   * @param state - LifeCycleState
+   * @param owner - Owner
+   * @param factor - Replication Factor
+   * @param type - Replication Type
+   * @return NavigableSet of matching ContainerIDs; empty if no container
+   * satisfies the criteria.
+   */
+  public NavigableSet<ContainerID> getMatchingContainerIDs(
+      LifeCycleState state, String owner,
+      ReplicationFactor factor, ReplicationType type) {
+
+    Preconditions.checkNotNull(state, "State cannot be null");
+    Preconditions.checkNotNull(owner, "Owner cannot be null");
+    Preconditions.checkNotNull(factor, "Factor cannot be null");
+    Preconditions.checkNotNull(type, "Type cannot be null");
+
+    try (AutoCloseableLock lock = autoLock.acquire()) {
+
+      // If we cannot meet any one condition we return EMPTY_SET immediately.
+      // Since when we intersect these sets, the result will be empty if any
+      // one is empty.
+      NavigableSet<ContainerID> stateSet =
+          lifeCycleStateMap.getCollection(state);
+      if (stateSet.size() == 0) {
+        return EMPTY_SET;
+      }
+
+      NavigableSet<ContainerID> ownerSet = ownerMap.getCollection(owner);
+      if (ownerSet.size() == 0) {
+        return EMPTY_SET;
+      }
+
+      NavigableSet<ContainerID> factorSet = factorMap.getCollection(factor);
+      if (factorSet.size() == 0) {
+        return EMPTY_SET;
+      }
+
+      NavigableSet<ContainerID> typeSet = typeMap.getCollection(type);
+      if (typeSet.size() == 0) {
+        return EMPTY_SET;
+      }
+
+
+      // if we add more constraints we will just add those sets here..
+      NavigableSet<ContainerID>[] sets = sortBySize(stateSet,
+          ownerSet, factorSet, typeSet);
+
+      NavigableSet<ContainerID> currentSet = sets[0];
+      // We take the smallest set and intersect against the larger sets. This
+      // allows us to reduce the lookups to the least possible number.
+      for (int x = 1; x < sets.length; x++) {
+        currentSet = intersectSets(currentSet, sets[x]);
+      }
+      return currentSet;
+    }
+  }
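+
+  /*
+   * Illustrative query; the owner name and the exact enum constants are
+   * example values based on the states, factors and types described in the
+   * class comment.
+   *
+   *   NavigableSet<ContainerID> candidates =
+   *       stateMap.getMatchingContainerIDs(LifeCycleState.OPEN, "ksm1",
+   *           ReplicationFactor.THREE, ReplicationType.RATIS);
+   */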
+
+  /**
+   * Calculates the intersection between sets and returns a new set.
+   *
+   * @param smaller - First Set
+   * @param bigger - Second Set
+   * @return resultSet which is the intersection of these two sets.
+   */
+  private NavigableSet<ContainerID> intersectSets(
+      NavigableSet<ContainerID> smaller,
+      NavigableSet<ContainerID> bigger) {
+    Preconditions.checkState(smaller.size() <= bigger.size(),
+        "This function assumes the first set is smaller than or equal to " +
+            "the second set");
+    NavigableSet<ContainerID> resultSet = new TreeSet<>();
+    for (ContainerID id : smaller) {
+      if (bigger.contains(id)) {
+        resultSet.add(id);
+      }
+    }
+    return resultSet;
+  }
+
+  /**
+   * Sorts a list of sets based on size. This is useful when we are
+   * intersecting the sets.
+   *
+   * @param sets - varargs of sets
+   * @return A sorted array of sets based on the size of each set.
+   */
+  @SuppressWarnings("unchecked")
+  private NavigableSet<ContainerID>[] sortBySize(
+      NavigableSet<ContainerID>... sets) {
+    for (int x = 0; x < sets.length - 1; x++) {
+      for (int y = 0; y < sets.length - x - 1; y++) {
+        if (sets[y].size() > sets[y + 1].size()) {
+          NavigableSet temp = sets[y];
+          sets[y] = sets[y + 1];
+          sets[y + 1] = temp;
+        }
+      }
+    }
+    return sets;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java
new file mode 100644
index 0000000..cf20f39
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+/**
+ * Container States management package.
+ */
+package org.apache.hadoop.hdds.scm.container.states;
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java
new file mode 100644
index 0000000..227df3c
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/SCMException.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.exceptions;
+
+import java.io.IOException;
+
+/**
+ * Exception thrown by SCM.
+ */
+public class SCMException extends IOException {
+  private final ResultCodes result;
+
+  /**
+   * Constructs an {@code IOException} with {@code null}
+   * as its error detail message.
+   */
+  public SCMException(ResultCodes result) {
+    this.result = result;
+  }
+
+  /**
+   * Constructs an {@code IOException} with the specified detail message.
+   *
+   * @param message The detail message (which is saved for later retrieval by
+   * the
+   * {@link #getMessage()} method)
+   */
+  public SCMException(String message, ResultCodes result) {
+    super(message);
+    this.result = result;
+  }
+
+  /**
+   * Constructs an {@code IOException} with the specified detail message
+   * and cause.
+   * <p>
+   * <p> Note that the detail message associated with {@code cause} is
+   * <i>not</i> automatically incorporated into this exception's detail
+   * message.
+   *
+   * @param message The detail message (which is saved for later retrieval by
+   * the
+   * {@link #getMessage()} method)
+   * @param cause The cause (which is saved for later retrieval by the {@link
+   * #getCause()} method).  (A null value is permitted, and indicates that the
+   * cause is nonexistent or unknown.)
+   * @since 1.6
+   */
+  public SCMException(String message, Throwable cause, ResultCodes result) {
+    super(message, cause);
+    this.result = result;
+  }
+
+  /**
+   * Constructs an {@code IOException} with the specified cause and a
+   * detail message of {@code (cause==null ? null : cause.toString())}
+   * (which typically contains the class and detail message of {@code cause}).
+   * This constructor is useful for IO exceptions that are little more
+   * than wrappers for other throwables.
+   *
+   * @param cause The cause (which is saved for later retrieval by the {@link
+   * #getCause()} method).  (A null value is permitted, and indicates that the
+   * cause is nonexistent or unknown.)
+   * @since 1.6
+   */
+  public SCMException(Throwable cause, ResultCodes result) {
+    super(cause);
+    this.result = result;
+  }
+
+  /**
+   * Returns resultCode.
+   * @return ResultCode
+   */
+  public ResultCodes getResult() {
+    return result;
+  }
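+
+  /*
+   * Typical construction, shown for illustration:
+   *
+   *   throw new SCMException("Duplicate container ID detected.",
+   *       ResultCodes.CONTAINER_EXISTS);
+   */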
+
+  /**
+   * Error codes to make it easy to decode these exceptions.
+   */
+  public enum ResultCodes {
+    SUCCEESS,
+    FAILED_TO_LOAD_NODEPOOL,
+    FAILED_TO_FIND_NODE_IN_POOL,
+    FAILED_TO_FIND_HEALTHY_NODES,
+    FAILED_TO_FIND_NODES_WITH_SPACE,
+    FAILED_TO_FIND_SUITABLE_NODE,
+    INVALID_CAPACITY,
+    INVALID_BLOCK_SIZE,
+    CHILL_MODE_EXCEPTION,
+    FAILED_TO_LOAD_OPEN_CONTAINER,
+    FAILED_TO_ALLOCATE_CONTAINER,
+    FAILED_TO_CHANGE_CONTAINER_STATE,
+    CONTAINER_EXISTS,
+    FAILED_TO_FIND_CONTAINER,
+    FAILED_TO_FIND_CONTAINER_WITH_SPACE,
+    BLOCK_EXISTS,
+    FAILED_TO_FIND_BLOCK,
+    IO_EXCEPTION,
+    UNEXPECTED_CONTAINER_STATE,
+    SCM_NOT_INITIALIZED
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java
new file mode 100644
index 0000000..7b69310
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/exceptions/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.exceptions;
+// Exceptions thrown by SCM.
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java
new file mode 100644
index 0000000..edbcfa1
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/CommandQueue.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.util.Time;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Command Queue is the queue of commands for a datanode.
+ * <p>
+ * The node manager, container manager and key space manager can queue commands
+ * for datanodes into this queue. These commands will be sent in the order in
+ * which they were queued.
+ */
+public class CommandQueue {
+  // This list is used as default return value.
+  private static final List<SCMCommand> DEFAULT_LIST = new LinkedList<>();
+  private final Map<UUID, Commands> commandMap;
+  private final Lock lock;
+  private long commandsInQueue;
+
+  /**
+   * Returns the number of commands in the queue.
+   * @return Command Count.
+   */
+  public long getCommandsInQueue() {
+    return commandsInQueue;
+  }
+
+  /**
+   * Constructs a Command Queue.
+   * TODO : Add a flusher thread that throws away commands older than a certain
+   * time period.
+   */
+  public CommandQueue() {
+    commandMap = new HashMap<>();
+    lock = new ReentrantLock();
+    commandsInQueue = 0;
+  }
+
+  /**
+   * This function is used only for test purposes.
+   */
+  @VisibleForTesting
+  public void clear() {
+    lock.lock();
+    try {
+      commandMap.clear();
+      commandsInQueue = 0;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Returns  a list of Commands for the datanode to execute, if we have no
+   * commands returns a empty list otherwise the current set of
+   * commands are returned and command map set to empty list again.
+   *
+   * @param datanodeUuid Datanode UUID
+   * @return List of SCM Commands.
+   */
+  @SuppressWarnings("unchecked")
+  List<SCMCommand> getCommand(final UUID datanodeUuid) {
+    lock.lock();
+    try {
+      Commands cmds = commandMap.remove(datanodeUuid);
+      List<SCMCommand> cmdList = null;
+      if(cmds != null) {
+        cmdList = cmds.getCommands();
+        commandsInQueue -= cmdList.size() > 0 ? cmdList.size() : 0;
+        // A post condition really.
+        Preconditions.checkState(commandsInQueue >= 0);
+      }
+      return cmds == null ? DEFAULT_LIST : cmdList;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Adds a command to the SCM queue to be sent to the datanode.
+   *
+   * @param datanodeUuid DatanodeDetails.Uuid
+   * @param command    - Command
+   */
+  public void addCommand(final UUID datanodeUuid, final SCMCommand
+      command) {
+    lock.lock();
+    try {
+      if (commandMap.containsKey(datanodeUuid)) {
+        commandMap.get(datanodeUuid).add(command);
+      } else {
+        commandMap.put(datanodeUuid, new Commands(command));
+      }
+      commandsInQueue++;
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  /**
+   * Class that stores commands for a datanode.
+   */
+  private static class Commands {
+    private long updateTime;
+    private long readTime;
+    private List<SCMCommand> commands;
+
+    /**
+     * Constructs a Commands class.
+     */
+    Commands() {
+      commands = new LinkedList<>();
+      updateTime = 0;
+      readTime = 0;
+    }
+
+    /**
+     * Creates the object and populates it with the command.
+     * @param command command to add to queue.
+     */
+    Commands(SCMCommand command) {
+      this();
+      this.add(command);
+    }
+
+    /**
+     * Gets the last time the commands for this node were updated.
+     * @return Time stamp
+     */
+    public long getUpdateTime() {
+      return updateTime;
+    }
+
+    /**
+     * Gets the last read time.
+     * @return last time when these commands were read from this queue.
+     */
+    public long getReadTime() {
+      return readTime;
+    }
+
+    /**
+     * Adds a command to the list.
+     *
+     * @param command SCMCommand
+     */
+    public void add(SCMCommand command) {
+      this.commands.add(command);
+      updateTime = Time.monotonicNow();
+    }
+
+    /**
+     * Returns the commands for this datanode.
+     * @return command list.
+     */
+    public List<SCMCommand> getCommands() {
+      List<SCMCommand> temp = this.commands;
+      this.commands = new LinkedList<>();
+      readTime = Time.monotonicNow();
+      return temp;
+    }
+  }
+}
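A short sketch of the drain-on-read behaviour described in the class comment above; the UUID is a placeholder and the snippet assumes same-package access, since getCommand is package-private:

CommandQueue queue = new CommandQueue();
UUID datanode = UUID.randomUUID();                    // placeholder datanode id
queue.addCommand(datanode, new ReregisterCommand());
// First read returns the queued command(s) and clears the entry.
List<SCMCommand> toSend = queue.getCommand(datanode);
// Second read finds nothing queued and returns the shared empty list.
List<SCMCommand> nothing = queue.getCommand(datanode);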
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java
new file mode 100644
index 0000000..43720f0
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/HeartbeatQueueItem.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+
+import static org.apache.hadoop.util.Time.monotonicNow;
+
+/**
+ * This class represents an item in the SCM heartbeat queue.
+ */
+public class HeartbeatQueueItem {
+  private DatanodeDetails datanodeDetails;
+  private long recvTimestamp;
+  private SCMNodeReport nodeReport;
+  private ReportState containerReportState;
+
+  /**
+   * Constructs a HeartbeatQueueItem.
+   *
+   * @param datanodeDetails - details of the datanode that sent the heartbeat.
+   * @param recvTimestamp - heartbeat receive timestamp.
+   * @param nodeReport - node report associated with the heartbeat if any.
+   * @param containerReportState - container report state.
+   */
+  HeartbeatQueueItem(DatanodeDetails datanodeDetails, long recvTimestamp,
+      SCMNodeReport nodeReport, ReportState containerReportState) {
+    this.datanodeDetails = datanodeDetails;
+    this.recvTimestamp = recvTimestamp;
+    this.nodeReport = nodeReport;
+    this.containerReportState = containerReportState;
+  }
+
+  /**
+   * @return datanode ID.
+   */
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
+  /**
+   * @return node report.
+   */
+  public SCMNodeReport getNodeReport() {
+    return nodeReport;
+  }
+
+  /**
+   * @return container report state.
+   */
+  public ReportState getContainerReportState() {
+    return containerReportState;
+  }
+
+  /**
+   * @return heartbeat receive timestamp.
+   */
+  public long getRecvTimestamp() {
+    return recvTimestamp;
+  }
+
+  /**
+   * Builder for HeartbeatQueueItem.
+   */
+  public static class Builder {
+    private DatanodeDetails datanodeDetails;
+    private SCMNodeReport nodeReport;
+    private ReportState containerReportState;
+    private long recvTimestamp = monotonicNow();
+
+    public Builder setDatanodeDetails(DatanodeDetails dnDetails) {
+      this.datanodeDetails = dnDetails;
+      return this;
+    }
+
+    public Builder setNodeReport(SCMNodeReport scmNodeReport) {
+      this.nodeReport = scmNodeReport;
+      return this;
+    }
+
+    public Builder setContainerReportState(ReportState crs) {
+      this.containerReportState = crs;
+      return this;
+    }
+
+    @VisibleForTesting
+    public Builder setRecvTimestamp(long recvTime) {
+      this.recvTimestamp = recvTime;
+      return this;
+    }
+
+    public HeartbeatQueueItem build() {
+      return new HeartbeatQueueItem(datanodeDetails, recvTimestamp, nodeReport,
+          containerReportState);
+    }
+  }
+}
\ No newline at end of file
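A sketch of how the Builder above is meant to be used; it mirrors the call made later in SCMNodeManager.sendHeartbeat, with datanodeDetails, nodeReport and reportState standing in for values taken from the datanode RPC:

HeartbeatQueueItem item = new HeartbeatQueueItem.Builder()
    .setDatanodeDetails(datanodeDetails)
    .setNodeReport(nodeReport)
    .setContainerReportState(reportState)
    .build();                 // recvTimestamp defaults to monotonicNow()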
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java
new file mode 100644
index 0000000..4392633
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManager.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+
+import java.io.Closeable;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * A node manager supports a simple interface for managing a datanode.
+ * <p/>
+ * 1. A datanode registers with the NodeManager.
+ * <p/>
+ * 2. If the node is allowed to register, we add that to the nodes that we need
+ * to keep track of.
+ * <p/>
+ * 3. A heartbeat is made by the node at a fixed frequency.
+ * <p/>
+ * 4. A node can be in any of these 4 states: {HEALTHY, STALE, DEAD,
+ * DECOMMISSIONED}
+ * <p/>
+ * HEALTHY - A datanode that is regularly heartbeating us.
+ *
+ * STALE - A datanode for which we have missed a few heartbeats.
+ *
+ * DEAD - A datanode that we have not heard from for a while.
+ *
+ * DECOMMISSIONED - Someone told us to remove this node from the tracking
+ * list, by calling removeNode. We will throw away this node's info soon.
+ */
+public interface NodeManager extends StorageContainerNodeProtocol,
+    NodeManagerMXBean, Closeable, Runnable {
+  /**
+   * Removes a data node from the management of this Node Manager.
+   *
+   * @param node - DataNode.
+   * @throws UnregisteredNodeException
+   */
+  void removeNode(DatanodeDetails node) throws UnregisteredNodeException;
+
+  /**
+   * Gets all live datanodes that are currently communicating with SCM.
+   * @param nodeState - State of the node
+   * @return List of Datanodes that are Heartbeating SCM.
+   */
+  List<DatanodeDetails> getNodes(NodeState nodeState);
+
+  /**
+   * Returns the Number of Datanodes that are communicating with SCM.
+   * @param nodeState - State of the node
+   * @return int -- count
+   */
+  int getNodeCount(NodeState nodeState);
+
+  /**
+   * Get all datanodes known to SCM.
+   *
+   * @return List of DatanodeDetails known to SCM.
+   */
+  List<DatanodeDetails> getAllNodes();
+
+  /**
+   * Chill mode is the period when node manager waits for a minimum
+   * configured number of datanodes to report in. This is called chill mode
+   * to indicate the period before node manager gets into action.
+   *
+   * Forcefully exits the chill mode, even if we have not met the minimum
+   * criteria of the nodes reporting in.
+   */
+  void forceExitChillMode();
+
+  /**
+   * Puts the node manager into manual chill mode.
+   */
+  void enterChillMode();
+
+  /**
+   * Brings node manager out of manual chill mode.
+   */
+  void exitChillMode();
+
+  /**
+   * Returns the aggregated node stats.
+   * @return the aggregated node stats.
+   */
+  SCMNodeStat getStats();
+
+  /**
+   * Return a map of node stats.
+   * @return a map of individual node stats (live/stale but not dead).
+   */
+  Map<UUID, SCMNodeStat> getNodeStats();
+
+  /**
+   * Return the node stat of the specified datanode.
+   * @param datanodeDetails DatanodeDetails.
+   * @return node stat if it is live/stale, null if it is dead or doesn't exist.
+   */
+  SCMNodeMetric getNodeStat(DatanodeDetails datanodeDetails);
+
+  /**
+   * Returns the NodePoolManager associated with the NodeManager.
+   * @return NodePoolManager
+   */
+  NodePoolManager getNodePoolManager();
+
+  /**
+   * Waits until a heartbeat has been processed by the NodeManager.
+   * @return true if heartbeat has been processed.
+   */
+  @VisibleForTesting
+  boolean waitForHeartbeatProcessed();
+
+  /**
+   * Returns the node state of a specific node.
+   * @param datanodeDetails DatanodeDetails
+   * @return Healthy/Stale/Dead.
+   */
+  NodeState getNodeState(DatanodeDetails datanodeDetails);
+
+  /**
+   * Adds a {@link SCMCommand} to the command queue; queued commands are
+   * handled asynchronously by the HB thread.
+   * @param dnId datanode uuid
+   * @param command SCM command to queue.
+   */
+  void addDatanodeCommand(UUID dnId, SCMCommand command);
+}
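A few illustrative reads against the interface above; nodeManager stands for an already-constructed implementation such as SCMNodeManager, and the snapshot caveat from the implementation's javadoc applies to all of them:

List<DatanodeDetails> healthy = nodeManager.getNodes(NodeState.HEALTHY);
int deadCount = nodeManager.getNodeCount(NodeState.DEAD);
SCMNodeStat aggregated = nodeManager.getStats();
SCMNodeMetric oneNode = nodeManager.getNodeStat(someDatanode);  // placeholder node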
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java
new file mode 100644
index 0000000..3ac993b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodeManagerMXBean.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import java.util.Map;
+
+/**
+ *
+ * This is the JMX management interface for node manager information.
+ */
+@InterfaceAudience.Private
+public interface NodeManagerMXBean {
+  /**
+   * Get the minimum number of nodes to get out of chill mode.
+   *
+   * @return int
+   */
+  int getMinimumChillModeNodes();
+
+  /**
+   * Returns a chill mode status string.
+   * @return String
+   */
+  String getChillModeStatus();
+
+
+  /**
+   * Returns true if node manager is out of chill mode, else false.
+   * @return true if out of chill mode, else false
+   */
+  boolean isOutOfChillMode();
+
+  /**
+   * Get the number of datanodes in each state.
+   *
+   * @return A map from node state to the number of nodes in that state.
+   */
+  Map<String, Integer> getNodeCount();
+}
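The SCMNodeManager implementation later in this patch registers this bean via MBeans.register("SCMNodeManager", "SCMNodeManagerInfo", this). A hedged sketch of reading it over JMX from the same JVM; the ObjectName string is inferred from Hadoop's usual MBeans naming convention and may differ in practice (imports from javax.management and java.lang.management assumed):

try {
  MBeanServer server = ManagementFactory.getPlatformMBeanServer();
  ObjectName name =
      new ObjectName("Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo");
  NodeManagerMXBean proxy =
      JMX.newMXBeanProxy(server, name, NodeManagerMXBean.class);
  System.out.println(proxy.getChillModeStatus());
} catch (JMException e) {
  // Bean not registered yet, or the inferred name does not match.
}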
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java
new file mode 100644
index 0000000..46faf9ca
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/NodePoolManager.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Interface that defines SCM NodePoolManager.
+ */
+public interface NodePoolManager extends Closeable {
+
+  /**
+   * Add a node to a node pool.
+   * @param pool - name of the node pool.
+   * @param node - data node.
+   */
+  void addNode(String pool, DatanodeDetails node) throws IOException;
+
+  /**
+   * Remove a node from a node pool.
+   * @param pool - name of the node pool.
+   * @param node - data node.
+   * @throws SCMException
+   */
+  void removeNode(String pool, DatanodeDetails node)
+      throws SCMException;
+
+  /**
+   * Get a list of known node pools.
+   * @return a list of known node pool names, or an empty list if no node pool
+   * is defined.
+   */
+  List<String> getNodePools();
+
+  /**
+   * Get all nodes of a node pool given the name of the node pool.
+   * @param pool - name of the node pool.
+   * @return a list of datanode ids or an empty list if the node pool was not
+   *  found.
+   */
+  List<DatanodeDetails> getNodes(String pool);
+
+  /**
+   * Get the node pool name if the node has been added to a node pool.
+   * @param datanodeDetails - datanode ID.
+   * @return node pool name if it has been assigned.
+   * null if the node has not been assigned to any node pool yet.
+   */
+  String getNodePool(DatanodeDetails datanodeDetails) throws SCMException;
+}
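A sketch against the interface above; poolManager stands for an implementation such as SCMNodePoolManager (added later in this patch), "rack-a" is a made-up pool name, and checked IOException/SCMException handling is omitted:

poolManager.addNode("rack-a", datanodeDetails);        // assign the node to a pool
List<String> pools = poolManager.getNodePools();       // names of known pools
List<DatanodeDetails> members = poolManager.getNodes("rack-a");
String assigned = poolManager.getNodePool(datanodeDetails);  // "rack-a"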
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java
new file mode 100644
index 0000000..6857b11
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodeManager.java
@@ -0,0 +1,909 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.hadoop.hdds.scm.HddsServerUtil;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.hdds.scm.VersionInfo;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.DatanodeDetailsProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto
+    .ErrorCode;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.ozone.protocol.StorageContainerNodeProtocol;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+import org.apache.hadoop.ozone.protocol.commands.RegisteredCommand;
+import org.apache.hadoop.ozone.protocol.commands.ReregisterCommand;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .INVALID;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+import static org.apache.hadoop.util.Time.monotonicNow;
+
+/**
+ * Maintains information about the Datanodes on SCM side.
+ * <p>
+ * Heartbeat processing in SCM is very simple compared to the HDFS
+ * HeartbeatManager.
+ * <p>
+ * Here we maintain 3 maps, and we propagate a node from healthyNodesMap to
+ * staleNodesMap to deadNodesMap. This moving of a node from one map to another
+ * is controlled by 4 configuration variables. These variables define how many
+ * heartbeats must go missing for the node to move from one map to another.
+ * <p>
+ * Each heartbeat that SCMNodeManager receives is put into the heartbeatQueue.
+ * The worker thread wakes up and grabs that heartbeat from the queue. The
+ * worker thread will look up the healthyNodes map and set the timestamp if the
+ * entry is there; if not, it will look up the stale and dead node maps.
+ * <p>
+ * The getNode(byState) functions make a copy of the node maps and then create
+ * a list based on that. It should be assumed that these get functions always
+ * report *stale* information. For example, getting the deadNodeCount followed
+ * by getNodes(DEAD) could very well produce a totally different count. Also,
+ * getNodeCount(HEALTHY) + getNodeCount(DEAD) + getNodeCount(STALE) is not
+ * guaranteed to add up to the total number of nodes that we know of. Please
+ * treat all get functions in this file as a snapshot of information that is
+ * inconsistent as soon as you read it.
+ */
+public class SCMNodeManager
+    implements NodeManager, StorageContainerNodeProtocol {
+
+  @VisibleForTesting
+  static final Logger LOG =
+      LoggerFactory.getLogger(SCMNodeManager.class);
+
+  /**
+   * Key = NodeID, value = timestamp.
+   */
+  private final ConcurrentHashMap<UUID, Long> healthyNodes;
+  private final ConcurrentHashMap<UUID, Long> staleNodes;
+  private final ConcurrentHashMap<UUID, Long> deadNodes;
+  private final Queue<HeartbeatQueueItem> heartbeatQueue;
+  private final ConcurrentHashMap<UUID, DatanodeDetails> nodes;
+  // Individual live node stats
+  private final ConcurrentHashMap<UUID, SCMNodeStat> nodeStats;
+  // Aggregated node stats
+  private SCMNodeStat scmStat;
+  // TODO: expose nodeStats and scmStat as metrics
+  private final AtomicInteger healthyNodeCount;
+  private final AtomicInteger staleNodeCount;
+  private final AtomicInteger deadNodeCount;
+  private final AtomicInteger totalNodes;
+  private long staleNodeIntervalMs;
+  private final long deadNodeIntervalMs;
+  private final long heartbeatCheckerIntervalMs;
+  private final long datanodeHBIntervalSeconds;
+  private final ScheduledExecutorService executorService;
+  private long lastHBcheckStart;
+  private long lastHBcheckFinished = 0;
+  private long lastHBProcessedCount;
+  private int chillModeNodeCount;
+  private final int maxHBToProcessPerLoop;
+  private final String clusterID;
+  private final VersionInfo version;
+  /**
+   * During startup, SCM enters chill mode and stays there until the number of
+   * registered datanodes reaches {@code chillModeNodeCount}.
+   * This flag is for tracking startup chill mode.
+   */
+  private AtomicBoolean inStartupChillMode;
+  /**
+   * Administrator can put SCM into chill mode manually.
+   * This flag is for tracking manual chill mode.
+   */
+  private AtomicBoolean inManualChillMode;
+  private final CommandQueue commandQueue;
+  // Node manager MXBean
+  private ObjectName nmInfoBean;
+
+  // Node pool manager.
+  private final SCMNodePoolManager nodePoolManager;
+  private final StorageContainerManager scmManager;
+
+  /**
+   * Constructs the SCM node manager.
+   */
+  public SCMNodeManager(OzoneConfiguration conf, String clusterID,
+      StorageContainerManager scmManager) throws IOException {
+    heartbeatQueue = new ConcurrentLinkedQueue<>();
+    healthyNodes = new ConcurrentHashMap<>();
+    deadNodes = new ConcurrentHashMap<>();
+    staleNodes = new ConcurrentHashMap<>();
+    nodes = new ConcurrentHashMap<>();
+    nodeStats = new ConcurrentHashMap<>();
+    scmStat = new SCMNodeStat();
+
+    healthyNodeCount = new AtomicInteger(0);
+    staleNodeCount = new AtomicInteger(0);
+    deadNodeCount = new AtomicInteger(0);
+    totalNodes = new AtomicInteger(0);
+    this.clusterID = clusterID;
+    this.version = VersionInfo.getLatestVersion();
+    commandQueue = new CommandQueue();
+
+    // TODO: Support this value as a Percentage of known machines.
+    chillModeNodeCount = 1;
+
+    staleNodeIntervalMs = HddsServerUtil.getStaleNodeInterval(conf);
+    deadNodeIntervalMs = HddsServerUtil.getDeadNodeInterval(conf);
+    heartbeatCheckerIntervalMs =
+        HddsServerUtil.getScmheartbeatCheckerInterval(conf);
+    datanodeHBIntervalSeconds = HddsServerUtil.getScmHeartbeatInterval(conf);
+    maxHBToProcessPerLoop = HddsServerUtil.getMaxHBToProcessPerLoop(conf);
+
+    executorService = HadoopExecutors.newScheduledThreadPool(1,
+        new ThreadFactoryBuilder().setDaemon(true)
+            .setNameFormat("SCM Heartbeat Processing Thread - %d").build());
+
+    LOG.info("Entering startup chill mode.");
+    this.inStartupChillMode = new AtomicBoolean(true);
+    this.inManualChillMode = new AtomicBoolean(false);
+
+    Preconditions.checkState(heartbeatCheckerIntervalMs > 0);
+    executorService.schedule(this, heartbeatCheckerIntervalMs,
+        TimeUnit.MILLISECONDS);
+
+    registerMXBean();
+
+    this.nodePoolManager = new SCMNodePoolManager(conf);
+    this.scmManager = scmManager;
+  }
+
+  private void registerMXBean() {
+    this.nmInfoBean = MBeans.register("SCMNodeManager",
+        "SCMNodeManagerInfo", this);
+  }
+
+  private void unregisterMXBean() {
+    if(this.nmInfoBean != null) {
+      MBeans.unregister(this.nmInfoBean);
+      this.nmInfoBean = null;
+    }
+  }
+
+  /**
+   * Removes a data node from the management of this Node Manager.
+   *
+   * @param node - DataNode.
+   * @throws UnregisteredNodeException
+   */
+  @Override
+  public void removeNode(DatanodeDetails node) {
+    // TODO : Fix me when adding the SCM CLI.
+
+  }
+
+  /**
+   * Gets all datanodes that are in a certain state. This function works by
+   * taking a snapshot of the current collection and then returning the list
+   * from that collection. This means that real map might have changed by the
+   * time we return this list.
+   *
+   * @return List of Datanodes that are known to SCM in the requested state.
+   */
+  @Override
+  public List<DatanodeDetails> getNodes(NodeState nodestate)
+      throws IllegalArgumentException {
+    Map<UUID, Long> set;
+    switch (nodestate) {
+    case HEALTHY:
+      synchronized (this) {
+        set = Collections.unmodifiableMap(new HashMap<>(healthyNodes));
+      }
+      break;
+    case STALE:
+      synchronized (this) {
+        set = Collections.unmodifiableMap(new HashMap<>(staleNodes));
+      }
+      break;
+    case DEAD:
+      synchronized (this) {
+        set = Collections.unmodifiableMap(new HashMap<>(deadNodes));
+      }
+      break;
+    default:
+      throw new IllegalArgumentException("Unknown node state requested.");
+    }
+
+    return set.entrySet().stream().map(entry -> nodes.get(entry.getKey()))
+        .collect(Collectors.toList());
+  }
+
+  /**
+   * Returns all datanodes that are known to SCM.
+   *
+   * @return List of DatanodeDetails
+   */
+  @Override
+  public List<DatanodeDetails> getAllNodes() {
+    Map<UUID, DatanodeDetails> set;
+    synchronized (this) {
+      set = Collections.unmodifiableMap(new HashMap<>(nodes));
+    }
+    return set.entrySet().stream().map(entry -> nodes.get(entry.getKey()))
+        .collect(Collectors.toList());
+  }
+
+  /**
+   * Get the minimum number of nodes to get out of Chill mode.
+   *
+   * @return int
+   */
+  @Override
+  public int getMinimumChillModeNodes() {
+    return chillModeNodeCount;
+  }
+
+  /**
+   * Sets the Minimum chill mode nodes count, used only in testing.
+   *
+   * @param count - Number of nodes.
+   */
+  @VisibleForTesting
+  public void setMinimumChillModeNodes(int count) {
+    chillModeNodeCount = count;
+  }
+
+  /**
+   * Returns chill mode Status string.
+   * @return String
+   */
+  @Override
+  public String getChillModeStatus() {
+    if (inStartupChillMode.get()) {
+      return "Still in chill mode, waiting on nodes to report in." +
+          String.format(" %d nodes reported, minimal %d nodes required.",
+              totalNodes.get(), getMinimumChillModeNodes());
+    }
+    if (inManualChillMode.get()) {
+      return "Out of startup chill mode, but in manual chill mode." +
+          String.format(" %d nodes have reported in.", totalNodes.get());
+    }
+    return "Out of chill mode." +
+        String.format(" %d nodes have reported in.", totalNodes.get());
+  }
+
+  /**
+   * Forcefully exits the chill mode even if we have not met the minimum
+   * criteria of exiting the chill mode. This will exit from both startup
+   * and manual chill mode.
+   */
+  @Override
+  public void forceExitChillMode() {
+    if(inStartupChillMode.get()) {
+      LOG.info("Leaving startup chill mode.");
+      inStartupChillMode.set(false);
+    }
+    if(inManualChillMode.get()) {
+      LOG.info("Leaving manual chill mode.");
+      inManualChillMode.set(false);
+    }
+  }
+
+  /**
+   * Puts the node manager into manual chill mode.
+   */
+  @Override
+  public void enterChillMode() {
+    LOG.info("Entering manual chill mode.");
+    inManualChillMode.set(true);
+  }
+
+  /**
+   * Brings node manager out of manual chill mode.
+   */
+  @Override
+  public void exitChillMode() {
+    LOG.info("Leaving manual chill mode.");
+    inManualChillMode.set(false);
+  }
+
+  /**
+   * Returns true if node manager is out of chill mode, else false.
+   * @return true if out of chill mode, else false
+   */
+  @Override
+  public boolean isOutOfChillMode() {
+    return !(inStartupChillMode.get() || inManualChillMode.get());
+  }
+
+  /**
+   * Returns the number of datanodes in the given state.
+   *
+   * @return int -- count
+   */
+  @Override
+  public int getNodeCount(NodeState nodestate) {
+    switch (nodestate) {
+    case HEALTHY:
+      return healthyNodeCount.get();
+    case STALE:
+      return staleNodeCount.get();
+    case DEAD:
+      return deadNodeCount.get();
+    case INVALID:
+      // This is unknown because some nodes can be in transit between the
+      // other states, so returning a count for it is not possible. We have
+      // such a state to deal with the fact that this information might not
+      // always be consistent.
+      return 0;
+    default:
+      return 0;
+    }
+  }
+
+  /**
+   * Used for testing.
+   *
+   * @return true if the HB check is done.
+   */
+  @VisibleForTesting
+  @Override
+  public boolean waitForHeartbeatProcessed() {
+    return lastHBcheckFinished != 0;
+  }
+
+  /**
+   * Returns the node state of a specific node.
+   *
+   * @param datanodeDetails - Datanode Details
+   * @return Healthy/Stale/Dead/Unknown.
+   */
+  @Override
+  public NodeState getNodeState(DatanodeDetails datanodeDetails) {
+    // There is a subtle race condition here, hence we also support
+    // the NodeState.INVALID. It is possible that just before we check the
+    // healthyNodes, we have removed the node from the healthy list but still
+    // not added it to the stale nodes list.
+    // We could fix that by adding the node to the stale list before removing
+    // it, but then the node would be in two states. Instead we just deal with
+    // the possibility of getting a state called unknown.
+
+    UUID id = datanodeDetails.getUuid();
+    if(healthyNodes.containsKey(id)) {
+      return HEALTHY;
+    }
+
+    if(staleNodes.containsKey(id)) {
+      return STALE;
+    }
+
+    if(deadNodes.containsKey(id)) {
+      return DEAD;
+    }
+
+    return INVALID;
+  }
+
+  /**
+   * This is the real worker thread that processes the HB queue. We do the
+   * following things in this thread.
+   * <p>
+   * Process the heartbeats that are in the HB queue. Move stale or dead nodes
+   * to healthy if we got a heartbeat from them. Move stale nodes to the dead
+   * node table if needed. Move healthy nodes to stale nodes if needed.
+   * <p>
+   * If it is a new node, we call register node and add it to the list of nodes.
+   * This will be replaced when we support registration of a node in SCM.
+   *
+   * @see Thread#run()
+   */
+  @Override
+  public void run() {
+    lastHBcheckStart = monotonicNow();
+    lastHBProcessedCount = 0;
+
+    // Process the whole queue.
+    while (!heartbeatQueue.isEmpty() &&
+        (lastHBProcessedCount < maxHBToProcessPerLoop)) {
+      HeartbeatQueueItem hbItem = heartbeatQueue.poll();
+      synchronized (this) {
+        handleHeartbeat(hbItem);
+      }
+      // If we are shutting down or interrupted, give up processing the rest
+      // of the HBs. This will terminate the HB processing thread.
+      if (Thread.currentThread().isInterrupted()) {
+        LOG.info("Current thread is interrupted, shutting down HB " +
+            "processing thread for Node Manager.");
+        return;
+      }
+    }
+
+    if (lastHBProcessedCount >= maxHBToProcessPerLoop) {
+      LOG.error("SCM is being flooded by heartbeats. Not able to keep up with" +
+          " the heartbeat counts. Processed {} heartbeats. Breaking out of" +
+          " loop. Leaving rest to be processed later. ", lastHBProcessedCount);
+    }
+
+    // Iterate over the Stale nodes and decide if we need to move any node to
+    // dead State.
+    long currentTime = monotonicNow();
+    for (Map.Entry<UUID, Long> entry : staleNodes.entrySet()) {
+      if (currentTime - entry.getValue() > deadNodeIntervalMs) {
+        synchronized (this) {
+          moveStaleNodeToDead(entry);
+        }
+      }
+    }
+
+    // Iterate over the healthy nodes and decide if we need to move any node to
+    // Stale State.
+    currentTime = monotonicNow();
+    for (Map.Entry<UUID, Long> entry : healthyNodes.entrySet()) {
+      if (currentTime - entry.getValue() > staleNodeIntervalMs) {
+        synchronized (this) {
+          moveHealthyNodeToStale(entry);
+        }
+      }
+    }
+    lastHBcheckFinished = monotonicNow();
+
+    monitorHBProcessingTime();
+
+    // We purposefully make this non-deterministic. Instead of using a
+    // scheduleAtFixedFrequency we will just go to sleep
+    // and wake up at the next rendezvous point, which is currentTime +
+    // heartbeatCheckerIntervalMs. This leads to the issue that we are now
+    // heartbeating not at a fixed cadence, but at clock tick + time taken to
+    // work.
+    //
+    // This time taken to work can skew the heartbeat processor thread.
+    // We don't care, for the following reasons:
+    //
+    // 1. checkerInterval is generally many magnitudes shorter than the
+    // datanode HB interval.
+    //
+    // 2. If we have too many nodes, the SCM would be doing only HB
+    // processing; this could lead to SCM CPU starvation. With this
+    // approach we always guarantee that the HB thread sleeps for a little
+    // while.
+    //
+    // 3. It is possible that we will never finish processing the HBs in the
+    // thread. But that means we have a misconfigured system. We will warn
+    // the users by logging that information.
+    //
+    // 4. And the most important reason: heartbeats are not blocked even if
+    // this thread does not run; they will go into the processing queue.
+
+    if (!Thread.currentThread().isInterrupted() &&
+        !executorService.isShutdown()) {
+      executorService.schedule(this, heartbeatCheckerIntervalMs, TimeUnit
+          .MILLISECONDS);
+    } else {
+      LOG.info("Current Thread is interrupted, shutting down HB processing " +
+          "thread for Node Manager.");
+    }
+  }
+
+  /**
+   * If we have taken too much time for HB processing, log that information.
+   */
+  private void monitorHBProcessingTime() {
+    if (TimeUnit.MILLISECONDS.toSeconds(lastHBcheckFinished -
+        lastHBcheckStart) > datanodeHBIntervalSeconds) {
+      LOG.error("Total time spend processing datanode HB's is greater than " +
+              "configured values for datanode heartbeats. Please adjust the" +
+              " heartbeat configs. Time Spend on HB processing: {} seconds " +
+              "Datanode heartbeat Interval: {} seconds , heartbeats " +
+              "processed: {}",
+          TimeUnit.MILLISECONDS
+              .toSeconds(lastHBcheckFinished - lastHBcheckStart),
+          datanodeHBIntervalSeconds, lastHBProcessedCount);
+    }
+  }
+
+  /**
+   * Moves a Healthy node to a Stale node state.
+   *
+   * @param entry - Map Entry
+   */
+  private void moveHealthyNodeToStale(Map.Entry<UUID, Long> entry) {
+    LOG.trace("Moving healthy node to stale: {}", entry.getKey());
+    healthyNodes.remove(entry.getKey());
+    healthyNodeCount.decrementAndGet();
+    staleNodes.put(entry.getKey(), entry.getValue());
+    staleNodeCount.incrementAndGet();
+
+    if (scmManager != null) {
+      // remove stale node's container report
+      scmManager.removeContainerReport(entry.getKey().toString());
+    }
+  }
+
+  /**
+   * Moves a Stale node to a dead node state.
+   *
+   * @param entry - Map Entry
+   */
+  private void moveStaleNodeToDead(Map.Entry<UUID, Long> entry) {
+    LOG.trace("Moving stale node to dead: {}", entry.getKey());
+    staleNodes.remove(entry.getKey());
+    staleNodeCount.decrementAndGet();
+    deadNodes.put(entry.getKey(), entry.getValue());
+    deadNodeCount.incrementAndGet();
+
+    // Update SCM node stats
+    SCMNodeStat deadNodeStat = nodeStats.get(entry.getKey());
+    scmStat.subtract(deadNodeStat);
+    nodeStats.remove(entry.getKey());
+  }
+
+  /**
+   * Handles a single heartbeat from a datanode.
+   *
+   * @param hbItem - heartbeat item from a datanode.
+   */
+  private void handleHeartbeat(HeartbeatQueueItem hbItem) {
+    lastHBProcessedCount++;
+
+    DatanodeDetails datanodeDetails = hbItem.getDatanodeDetails();
+    UUID datanodeUuid = datanodeDetails.getUuid();
+    SCMNodeReport nodeReport = hbItem.getNodeReport();
+    long recvTimestamp = hbItem.getRecvTimestamp();
+    long processTimestamp = Time.monotonicNow();
+    if (LOG.isTraceEnabled()) {
+      //TODO: add average queue time of heartbeat request as metrics
+      LOG.trace("Processing Heartbeat from datanode {}: queueing time {}",
+          datanodeUuid, processTimestamp - recvTimestamp);
+    }
+
+    // If this node is already in the list of known and healthy nodes
+    // just set the last timestamp and return.
+    if (healthyNodes.containsKey(datanodeUuid)) {
+      healthyNodes.put(datanodeUuid, processTimestamp);
+      updateNodeStat(datanodeUuid, nodeReport);
+      updateCommandQueue(datanodeUuid,
+          hbItem.getContainerReportState().getState());
+      return;
+    }
+
+    // A stale node has heartbeated us; we need to remove the node from the
+    // stale list and move it to the healthy list.
+    if (staleNodes.containsKey(datanodeUuid)) {
+      staleNodes.remove(datanodeUuid);
+      healthyNodes.put(datanodeUuid, processTimestamp);
+      healthyNodeCount.incrementAndGet();
+      staleNodeCount.decrementAndGet();
+      updateNodeStat(datanodeUuid, nodeReport);
+      updateCommandQueue(datanodeUuid,
+          hbItem.getContainerReportState().getState());
+      return;
+    }
+
+    // A dead node has heartbeated us; we need to remove that node from the
+    // dead node list and move it to the healthy list.
+    if (deadNodes.containsKey(datanodeUuid)) {
+      deadNodes.remove(datanodeUuid);
+      healthyNodes.put(datanodeUuid, processTimestamp);
+      deadNodeCount.decrementAndGet();
+      healthyNodeCount.incrementAndGet();
+      updateNodeStat(datanodeUuid, nodeReport);
+      updateCommandQueue(datanodeUuid,
+          hbItem.getContainerReportState().getState());
+      return;
+    }
+
+    LOG.warn("SCM receive heartbeat from unregistered datanode {}",
+        datanodeUuid);
+    this.commandQueue.addCommand(datanodeUuid,
+        new ReregisterCommand());
+  }
+
+  private void updateNodeStat(UUID dnId, SCMNodeReport nodeReport) {
+    SCMNodeStat stat = nodeStats.get(dnId);
+    if (stat == null) {
+      LOG.debug("SCM updateNodeStat based on heartbeat from previous" +
+          "dead datanode {}", dnId);
+      stat = new SCMNodeStat();
+    }
+
+    if (nodeReport != null && nodeReport.getStorageReportCount() > 0) {
+      long totalCapacity = 0;
+      long totalRemaining = 0;
+      long totalScmUsed = 0;
+      List<SCMStorageReport> storageReports = nodeReport.getStorageReportList();
+      for (SCMStorageReport report : storageReports) {
+        totalCapacity += report.getCapacity();
+        totalRemaining += report.getRemaining();
+        totalScmUsed += report.getScmUsed();
+      }
+      scmStat.subtract(stat);
+      stat.set(totalCapacity, totalScmUsed, totalRemaining);
+      nodeStats.put(dnId, stat);
+      scmStat.add(stat);
+    }
+  }
+
+  private void updateCommandQueue(UUID dnId,
+                                  ReportState.states containerReportState) {
+    if (containerReportState != null) {
+      switch (containerReportState) {
+      case completeContinerReport:
+        commandQueue.addCommand(dnId,
+            SendContainerCommand.newBuilder().build());
+        return;
+      case deltaContainerReport:
+      case noContainerReports:
+      default:
+        // do nothing
+      }
+    }
+  }
+
+  /**
+   * Closes the node manager and releases any system resources associated with
+   * it. If it is already closed then invoking this method has no effect.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    unregisterMXBean();
+    nodePoolManager.close();
+    executorService.shutdown();
+    try {
+      if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+        executorService.shutdownNow();
+      }
+
+      if (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
+        LOG.error("Unable to shutdown NodeManager properly.");
+      }
+    } catch (InterruptedException e) {
+      executorService.shutdownNow();
+      Thread.currentThread().interrupt();
+    }
+  }
+
+  @VisibleForTesting
+  long getLastHBProcessedCount() {
+    return lastHBProcessedCount;
+  }
+
+  /**
+   * Gets the version info from SCM.
+   *
+   * @param versionRequest - version Request.
+   * @return - returns SCM version info and other information needed by the
+   * datanode.
+   */
+  @Override
+  public VersionResponse getVersion(SCMVersionRequestProto versionRequest) {
+    return VersionResponse.newBuilder()
+        .setVersion(this.version.getVersion())
+        .build();
+  }
+
+  /**
+   * Register the node if the node finds that it is not registered with any
+   * SCM.
+   *
+   * @param datanodeDetailsProto - Send datanodeDetails with Node info.
+   *                   This function generates and assigns new datanode ID
+   *                   for the datanode. This allows SCM to be run independent
+   *                   of Namenode if required.
+   *
+   * @return SCMCommand - a RegisteredCommand with the registration result.
+   */
+  @Override
+  public SCMCommand register(DatanodeDetailsProto datanodeDetailsProto) {
+
+    String hostname = null;
+    String ip = null;
+    DatanodeDetails datanodeDetails = DatanodeDetails.getFromProtoBuf(
+        datanodeDetailsProto);
+    InetAddress dnAddress = Server.getRemoteIp();
+    if (dnAddress != null) {
+      // Mostly called inside an RPC, update ip and peer hostname
+      hostname = dnAddress.getHostName();
+      ip = dnAddress.getHostAddress();
+      datanodeDetails.setHostName(hostname);
+      datanodeDetails.setIpAddress(ip);
+    }
+    SCMCommand responseCommand = verifyDatanodeUUID(datanodeDetails);
+    if (responseCommand != null) {
+      return responseCommand;
+    }
+    UUID dnId = datanodeDetails.getUuid();
+    nodes.put(dnId, datanodeDetails);
+    totalNodes.incrementAndGet();
+    healthyNodes.put(dnId, monotonicNow());
+    healthyNodeCount.incrementAndGet();
+    nodeStats.put(dnId, new SCMNodeStat());
+
+    if(inStartupChillMode.get() &&
+        totalNodes.get() >= getMinimumChillModeNodes()) {
+      inStartupChillMode.getAndSet(false);
+      LOG.info("Leaving startup chill mode.");
+    }
+
+    // TODO: define node pool policy for non-default node pool.
+    // For now, all nodes are added to the "DefaultNodePool" upon registration
+    // if it has not been added to any node pool yet.
+    try {
+      if (nodePoolManager.getNodePool(datanodeDetails) == null) {
+        nodePoolManager.addNode(SCMNodePoolManager.DEFAULT_NODEPOOL,
+            datanodeDetails);
+      }
+    } catch (IOException e) {
+      // TODO: make sure registration failure is handled correctly.
+      return RegisteredCommand.newBuilder()
+          .setErrorCode(ErrorCode.errorNodeNotPermitted)
+          .build();
+    }
+    LOG.info("Data node with ID: {} Registered.",
+        datanodeDetails.getUuid());
+    RegisteredCommand.Builder builder =
+        RegisteredCommand.newBuilder().setErrorCode(ErrorCode.success)
+            .setDatanodeUUID(datanodeDetails.getUuidString())
+            .setClusterID(this.clusterID);
+    if (hostname != null && ip != null) {
+      builder.setHostname(hostname).setIpAddress(ip);
+    }
+    return builder.build();
+  }
+
+  /**
+   * Checks whether the datanode is already registered under a known UUID.
+   *
+   * @param datanodeDetails - Datanode Details.
+   * @return a success RegisteredCommand if the datanode is already registered,
+   * null otherwise.
+   */
+  private SCMCommand verifyDatanodeUUID(DatanodeDetails datanodeDetails) {
+    if (datanodeDetails.getUuid() != null &&
+        nodes.containsKey(datanodeDetails.getUuid())) {
+      LOG.trace("Datanode is already registered. Datanode: {}",
+          datanodeDetails.toString());
+      return RegisteredCommand.newBuilder()
+          .setErrorCode(ErrorCode.success)
+          .setClusterID(this.clusterID)
+          .setDatanodeUUID(datanodeDetails.getUuidString())
+          .build();
+    }
+    return null;
+  }
+
+  /**
+   * Send heartbeat to indicate the datanode is alive and doing well.
+   *
+   * @param datanodeDetailsProto - DatanodeDetailsProto.
+   * @param nodeReport - node report.
+   * @param containerReportState - container report state.
+   * @return SCMheartbeat response.
+   * @throws IOException
+   */
+  @Override
+  public List<SCMCommand> sendHeartbeat(
+      DatanodeDetailsProto datanodeDetailsProto, SCMNodeReport nodeReport,
+      ReportState containerReportState) {
+
+    Preconditions.checkNotNull(datanodeDetailsProto, "Heartbeat is missing " +
+        "DatanodeDetails.");
+    DatanodeDetails datanodeDetails = DatanodeDetails
+        .getFromProtoBuf(datanodeDetailsProto);
+    // Checking for null to make sure that we don't get
+    // an exception from the concurrent queue.
+    // This could only be a problem in tests; if this function is invoked via
+    // protobuf, the transport layer guarantees that this is not null.
+    if (datanodeDetails != null) {
+      heartbeatQueue.add(
+          new HeartbeatQueueItem.Builder()
+              .setDatanodeDetails(datanodeDetails)
+              .setNodeReport(nodeReport)
+              .setContainerReportState(containerReportState)
+              .build());
+    } else {
+      LOG.error("Datanode ID in heartbeat is null");
+    }
+    return commandQueue.getCommand(datanodeDetails.getUuid());
+  }
+
+  /**
+   * Returns the aggregated node stats.
+   * @return the aggregated node stats.
+   */
+  @Override
+  public SCMNodeStat getStats() {
+    return new SCMNodeStat(this.scmStat);
+  }
+
+  /**
+   * Return a map of node stats.
+   * @return a map of individual node stats (live/stale but not dead).
+   */
+  @Override
+  public Map<UUID, SCMNodeStat> getNodeStats() {
+    return Collections.unmodifiableMap(nodeStats);
+  }
+
+  /**
+   * Return the node stat of the specified datanode.
+   * @param datanodeDetails - datanode ID.
+   * @return node stat if it is live/stale, null if it is dead or doesn't exist.
+   */
+  @Override
+  public SCMNodeMetric getNodeStat(DatanodeDetails datanodeDetails) {
+    return new SCMNodeMetric(nodeStats.get(datanodeDetails.getUuid()));
+  }
+
+  @Override
+  public NodePoolManager getNodePoolManager() {
+    return nodePoolManager;
+  }
+
+  @Override
+  public Map<String, Integer> getNodeCount() {
+    Map<String, Integer> nodeCountMap = new HashMap<String, Integer>();
+    for(NodeState state : NodeState.values()) {
+      nodeCountMap.put(state.toString(), getNodeCount(state));
+    }
+    return nodeCountMap;
+  }
+
+  @Override
+  public void addDatanodeCommand(UUID dnId, SCMCommand command) {
+    this.commandQueue.addCommand(dnId, command);
+  }
+
+  @VisibleForTesting
+  public void setStaleNodeIntervalMs(long interval) {
+    this.staleNodeIntervalMs = interval;
+  }
+}
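An end-to-end sketch of the lifecycle described in the class comment, under the assumption that conf, clusterId, scm and the protobuf messages are already available; the heartbeat is only queued here, and the background thread later moves the node between the healthy/stale/dead maps:

SCMNodeManager nodeManager = new SCMNodeManager(conf, clusterId, scm);
nodeManager.register(datanodeDetailsProto);     // node lands in the healthy map
List<SCMCommand> commands =
    nodeManager.sendHeartbeat(datanodeDetailsProto, nodeReport, reportState);
// commands holds whatever was queued for this datanode (e.g. a
// ReregisterCommand if SCM did not recognize it).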
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java
new file mode 100644
index 0000000..a4a6c51
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/SCMNodePoolManager.java
@@ -0,0 +1,269 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_MB;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_FIND_NODE_IN_POOL;
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
+    .FAILED_TO_LOAD_NODEPOOL;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+import static org.apache.hadoop.ozone.OzoneConsts.NODEPOOL_DB;
+
+/**
+ * SCM node pool manager that manages node pools.
+ */
+public final class SCMNodePoolManager implements NodePoolManager {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SCMNodePoolManager.class);
+  private static final List<DatanodeDetails> EMPTY_NODE_LIST =
+      new ArrayList<>();
+  private static final List<String> EMPTY_NODEPOOL_LIST = new ArrayList<>();
+  public static final String DEFAULT_NODEPOOL = "DefaultNodePool";
+
+  // DB that saves the node to node pool mapping.
+  private MetadataStore nodePoolStore;
+
+  // In-memory node pool to nodes mapping
+  private HashMap<String, Set<DatanodeDetails>> nodePools;
+
+  // Read-write lock for nodepool operations
+  private ReadWriteLock lock;
+
+  /**
+   * Construct SCMNodePoolManager class that manages node to node pool mapping.
+   * @param conf - configuration.
+   * @throws IOException
+   */
+  public SCMNodePoolManager(final OzoneConfiguration conf)
+      throws IOException {
+    final int cacheSize = conf.getInt(OZONE_SCM_DB_CACHE_SIZE_MB,
+        OZONE_SCM_DB_CACHE_SIZE_DEFAULT);
+    File metaDir = getOzoneMetaDirPath(conf);
+    String scmMetaDataDir = metaDir.getPath();
+    File nodePoolDBPath = new File(scmMetaDataDir, NODEPOOL_DB);
+    nodePoolStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setDbFile(nodePoolDBPath)
+        .setCacheSize(cacheSize * OzoneConsts.MB)
+        .build();
+    nodePools = new HashMap<>();
+    lock = new ReentrantReadWriteLock();
+    init();
+  }
+
+  /**
+   * Initialize the in-memory store from the persisted LevelDB store.
+   * No lock is needed as init() is only invoked by constructor.
+   * @throws SCMException
+   */
+  private void init() throws SCMException {
+    try {
+      nodePoolStore.iterate(null, (key, value) -> {
+        try {
+          DatanodeDetails nodeId = DatanodeDetails.getFromProtoBuf(
+              HddsProtos.DatanodeDetailsProto.PARSER.parseFrom(key));
+          String poolName = DFSUtil.bytes2String(value);
+
+          Set<DatanodeDetails> nodePool = null;
+          if (nodePools.containsKey(poolName)) {
+            nodePool = nodePools.get(poolName);
+          } else {
+            nodePool = new HashSet<>();
+            nodePools.put(poolName, nodePool);
+          }
+          nodePool.add(nodeId);
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Adding node: {} to node pool: {}",
+                nodeId, poolName);
+          }
+        } catch (IOException e) {
+          LOG.warn("Can't add a datanode to node pool, continue next...");
+        }
+        return true;
+      });
+    } catch (IOException e) {
+      LOG.error("Loading node pool error " + e);
+      throw new SCMException("Failed to load node pool",
+          FAILED_TO_LOAD_NODEPOOL);
+    }
+  }
+
+  /**
+   * Add a datanode to a node pool.
+   * @param pool - name of the node pool.
+   * @param node - datanode to be added.
+   */
+  @Override
+  public void addNode(final String pool, final DatanodeDetails node)
+      throws IOException {
+    Preconditions.checkNotNull(pool, "pool name is null");
+    Preconditions.checkNotNull(node, "node is null");
+    lock.writeLock().lock();
+    try {
+      // add to the persistent store
+      nodePoolStore.put(node.getProtoBufMessage().toByteArray(),
+          DFSUtil.string2Bytes(pool));
+
+      // add to the in-memory store
+      Set<DatanodeDetails> nodePool = null;
+      if (nodePools.containsKey(pool)) {
+        nodePool = nodePools.get(pool);
+      } else {
+        nodePool = new HashSet<DatanodeDetails>();
+        nodePools.put(pool, nodePool);
+      }
+      nodePool.add(node);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Remove a datanode from a node pool.
+   * @param pool - name of the node pool.
+   * @param node - datanode to be removed.
+   * @throws SCMException
+   */
+  @Override
+  public void removeNode(final String pool, final DatanodeDetails node)
+      throws SCMException {
+    Preconditions.checkNotNull(pool, "pool name is null");
+    Preconditions.checkNotNull(node, "node is null");
+    lock.writeLock().lock();
+    try {
+      // Remove from the persistent store
+      byte[] kName = node.getProtoBufMessage().toByteArray();
+      byte[] kData = nodePoolStore.get(kName);
+      if (kData == null) {
+        throw new SCMException(String.format("Unable to find node %s from" +
+            " pool %s in DB.", DFSUtil.bytes2String(kName), pool),
+            FAILED_TO_FIND_NODE_IN_POOL);
+      }
+      nodePoolStore.delete(kName);
+
+      // Remove from the in-memory store
+      if (nodePools.containsKey(pool)) {
+        Set<DatanodeDetails> nodePool = nodePools.get(pool);
+        nodePool.remove(node);
+      } else {
+        throw new SCMException(String.format("Unable to find node %s from" +
+            " pool %s in MAP.", DFSUtil.bytes2String(kName), pool),
+            FAILED_TO_FIND_NODE_IN_POOL);
+      }
+    } catch (IOException e) {
+      throw new SCMException("Failed to remove node " + node.toString()
+          + " from node pool " + pool, e,
+          SCMException.ResultCodes.IO_EXCEPTION);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Get all the node pools.
+   * @return all the node pools.
+   */
+  @Override
+  public List<String> getNodePools() {
+    lock.readLock().lock();
+    try {
+      if (!nodePools.isEmpty()) {
+        return nodePools.keySet().stream().collect(Collectors.toList());
+      } else {
+        return EMPTY_NODEPOOL_LIST;
+      }
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Get all datanodes of a specific node pool.
+   * @param pool - name of the node pool.
+   * @return all datanodes of the specified node pool.
+   */
+  @Override
+  public List<DatanodeDetails> getNodes(final String pool) {
+    Preconditions.checkNotNull(pool, "pool name is null");
+    if (nodePools.containsKey(pool)) {
+      return nodePools.get(pool).stream().collect(Collectors.toList());
+    } else {
+      return EMPTY_NODE_LIST;
+    }
+  }
+
+  /**
+   * Get the node pool name if the node has been added to a node pool.
+   * @param datanodeDetails - details of the datanode.
+   * @return node pool name if it has been assigned;
+   * null if the node has not been assigned to any node pool yet.
+   * TODO: Put this in an in-memory map if performance is an issue.
+   */
+  @Override
+  public String getNodePool(final DatanodeDetails datanodeDetails)
+      throws SCMException {
+    Preconditions.checkNotNull(datanodeDetails, "node is null");
+    try {
+      byte[]  result = nodePoolStore.get(
+          datanodeDetails.getProtoBufMessage().toByteArray());
+      return result == null ? null : DFSUtil.bytes2String(result);
+    } catch (IOException e) {
+      throw new SCMException("Failed to get node pool for node "
+          + datanodeDetails.toString(), e,
+          SCMException.ResultCodes.IO_EXCEPTION);
+    }
+  }
+
+  /**
+   * Close node pool level db store.
+   * @throws IOException
+   */
+  @Override
+  public void close() throws IOException {
+    nodePoolStore.close();
+  }
+}
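A minimal, illustration-only sketch (not part of the patch) of how SCM-side code might exercise the node pool manager added above. The DatanodeDetails instance and the Ozone metadata directory configuration are assumed to come from the existing SCM registration and startup paths; the class and method names below are hypothetical.

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.scm.node.SCMNodePoolManager;

import java.io.IOException;
import java.util.List;

/** Hypothetical caller of SCMNodePoolManager, for illustration only. */
public final class NodePoolUsageSketch {
  private NodePoolUsageSketch() { }

  public static void registerAndQuery(DatanodeDetails datanode)
      throws IOException {
    // Assumes the Ozone metadata directory and SCM settings are configured.
    OzoneConfiguration conf = new OzoneConfiguration();
    SCMNodePoolManager poolManager = new SCMNodePoolManager(conf);
    try {
      // Persists the node-to-pool mapping and updates the in-memory map.
      poolManager.addNode(SCMNodePoolManager.DEFAULT_NODEPOOL, datanode);

      // Looks up the pool the node belongs to and lists its members.
      String pool = poolManager.getNodePool(datanode);
      List<DatanodeDetails> members = poolManager.getNodes(pool);
      System.out.println(pool + " has " + members.size() + " member(s).");
    } finally {
      poolManager.close();
    }
  }
}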
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java
new file mode 100644
index 0000000..d6a8ad0
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/package-info.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.node;
+
+/**
+ * The node package deals with node management.
+ * <p/>
+ * The node manager takes care of node registrations, removal of nodes and
+ * handling of heartbeats.
+ * <p/>
+ * The node manager maintains statistics that are sent as part of
+ * heartbeats.
+ * <p/>
+ * The container manager polls the node manager to learn the state of the
+ * datanodes that it is interested in.
+ */
+ */
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..4669e74
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+/*
+ * This package contains StorageContainerManager classes.
+ */
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
new file mode 100644
index 0000000..8e43528
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Manage Ozone pipelines.
+ */
+public abstract class PipelineManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(PipelineManager.class);
+  private final List<PipelineChannel> activePipelineChannels;
+  private final AtomicInteger conduitsIndex;
+
+  public PipelineManager() {
+    activePipelineChannels = new LinkedList<>();
+    conduitsIndex = new AtomicInteger(0);
+  }
+
+  /**
+   * This function is called by the Container Manager while allocating a new
+   * container. The client specifies what kind of replication pipeline is
+   * needed and, based on the replication type in the request, the
+   * appropriate interface is invoked.
+   *
+   * @param containerName Name of the container
+   * @param replicationFactor - Replication Factor
+   * @param replicationType - Replication Type
+   * @return a Pipeline.
+   */
+  public synchronized final Pipeline getPipeline(String containerName,
+      ReplicationFactor replicationFactor, ReplicationType replicationType)
+      throws IOException {
+    /**
+     * In the Ozone world, we have a very simple policy.
+     *
+     * 1. Try to create a pipelineChannel if there are enough free nodes.
+     *
+     * 2. This allows all nodes to be part of a pipelineChannel quickly.
+     *
+     * 3. If there are not enough free nodes, return conduits in a
+     * round-robin fashion.
+     *
+     * TODO: Might have to come up with a better algorithm than this.
+     * Create a new placement policy that returns conduits in round robin
+     * fashion.
+     */
+    PipelineChannel pipelineChannel =
+        allocatePipelineChannel(replicationFactor);
+    if (pipelineChannel != null) {
+      LOG.debug("created new pipelineChannel:{} for container:{}",
+          pipelineChannel.getName(), containerName);
+      activePipelineChannels.add(pipelineChannel);
+    } else {
+      pipelineChannel =
+          findOpenPipelineChannel(replicationType, replicationFactor);
+      if (pipelineChannel != null) {
+        LOG.debug("re-used pipelineChannel:{} for container:{}",
+            pipelineChannel.getName(), containerName);
+      }
+    }
+    if (pipelineChannel == null) {
+      LOG.error("Get pipelineChannel call failed. We are not able to find" +
+              "free nodes or operational pipelineChannel.");
+      return null;
+    } else {
+      return new Pipeline(containerName, pipelineChannel);
+    }
+  }
+
+  protected int getReplicationCount(ReplicationFactor factor) {
+    switch (factor) {
+    case ONE:
+      return 1;
+    case THREE:
+      return 3;
+    default:
+      throw new IllegalArgumentException("Unexpected replication count");
+    }
+  }
+
+  public abstract PipelineChannel allocatePipelineChannel(
+      ReplicationFactor replicationFactor) throws IOException;
+
+  /**
+   * Find a PipelineChannel that is operational.
+   *
+   * @return - PipelineChannel or null
+   */
+  private PipelineChannel findOpenPipelineChannel(
+      ReplicationType type, ReplicationFactor factor) {
+    PipelineChannel pipelineChannel = null;
+    final int sentinel = -1;
+    if (activePipelineChannels.isEmpty()) {
+      LOG.error("No operational pipelineChannels found. Returning null.");
+      return null;
+    }
+    int startIndex = getNextIndex();
+    int nextIndex = sentinel;
+    for (; startIndex != nextIndex; nextIndex = getNextIndex()) {
+      // Just walk the list in a circular way.
+      PipelineChannel temp =
+          activePipelineChannels
+              .get(nextIndex != sentinel ? nextIndex : startIndex);
+      // if we find an operational pipelineChannel just return that.
+      if ((temp.getLifeCycleState() == LifeCycleState.OPEN) &&
+          (temp.getFactor() == factor) && (temp.getType() == type)) {
+        pipelineChannel = temp;
+        break;
+      }
+    }
+    return pipelineChannel;
+  }
+
+  /**
+   * Gets the index of the next PipelineChannel to use.
+   *
+   * @return index into the linked list of active pipeline channels.
+   */
+  private int getNextIndex() {
+    return conduitsIndex.incrementAndGet() % activePipelineChannels.size();
+  }
+
+  /**
+   * Creates a pipeline from a specified set of Nodes.
+   * @param pipelineID - Name of the pipeline
+   * @param datanodes - The list of datanodes that make this pipeline.
+   */
+  public abstract void createPipeline(String pipelineID,
+      List<DatanodeDetails> datanodes) throws IOException;
+
+  /**
+   * Close the pipeline with the given pipelineID.
+   */
+  public abstract void closePipeline(String pipelineID) throws IOException;
+
+  /**
+   * List the members of the pipeline.
+   * @return the list of datanodes in the pipeline
+   */
+  public abstract List<DatanodeDetails> getMembers(String pipelineID)
+      throws IOException;
+
+  /**
+   * Update the datanode list of the pipeline.
+   */
+  public abstract void updatePipeline(String pipelineID,
+      List<DatanodeDetails> newDatanodes) throws IOException;
+}
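A side note for reviewers: the circular walk in findOpenPipelineChannel above can be hard to follow, so here is a stand-alone sketch (not part of the patch) of the same start-index/sentinel loop in isolation, with a toy String list standing in for the active channel list and a startsWith check standing in for the state/factor/type checks.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/** Illustration of the round-robin walk used by PipelineManager. */
public final class RoundRobinWalkSketch {
  private static final AtomicInteger INDEX = new AtomicInteger(0);

  private static int nextIndex(int size) {
    // Same shape as getNextIndex(): advance the shared counter, wrap by size.
    return INDEX.incrementAndGet() % size;
  }

  /** Returns the first "open" element, walking the list circularly. */
  static String pickOpen(List<String> channels) {
    final int sentinel = -1;
    if (channels.isEmpty()) {
      return null;
    }
    int startIndex = nextIndex(channels.size());
    for (int next = sentinel; startIndex != next;
         next = nextIndex(channels.size())) {
      String candidate = channels.get(next != sentinel ? next : startIndex);
      if (candidate.startsWith("OPEN")) { // stands in for the state checks
        return candidate;
      }
    }
    return null; // wrapped around without finding an open channel
  }

  public static void main(String[] args) {
    System.out.println(pickOpen(Arrays.asList("CLOSED-1", "OPEN-2", "OPEN-3")));
  }
}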
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
new file mode 100644
index 0000000..f0c9eea
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
@@ -0,0 +1,230 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .SCMContainerPlacementRandom;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.pipelines.ratis.RatisManagerImpl;
+import org.apache.hadoop.hdds.scm.pipelines.standalone.StandaloneManagerImpl;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Sends the request to the right pipeline manager.
+ */
+public class PipelineSelector {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(PipelineSelector.class);
+  private final ContainerPlacementPolicy placementPolicy;
+  private final NodeManager nodeManager;
+  private final Configuration conf;
+  private final RatisManagerImpl ratisManager;
+  private final StandaloneManagerImpl standaloneManager;
+  private final long containerSize;
+
+  /**
+   * Constructs a pipeline Selector.
+   *
+   * @param nodeManager - node manager
+   * @param conf - Ozone Config
+   */
+  public PipelineSelector(NodeManager nodeManager, Configuration conf) {
+    this.nodeManager = nodeManager;
+    this.conf = conf;
+    this.placementPolicy = createContainerPlacementPolicy(nodeManager, conf);
+    this.containerSize = OzoneConsts.GB * this.conf.getInt(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB,
+        ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT);
+    this.standaloneManager =
+        new StandaloneManagerImpl(this.nodeManager, placementPolicy,
+            containerSize);
+    this.ratisManager =
+        new RatisManagerImpl(this.nodeManager, placementPolicy, containerSize,
+            conf);
+  }
+
+  /**
+   * Translates a list of nodes, ordered such that the first is the leader,
+   * into a corresponding {@link PipelineChannel} object.
+   *
+   * @param nodes - list of datanodes on which we will allocate the container.
+   * The first node in the list will be the leader node.
+   * @return pipelineChannel corresponding to the nodes
+   */
+  public static PipelineChannel newPipelineFromNodes(
+      List<DatanodeDetails> nodes, LifeCycleState state,
+      ReplicationType replicationType, ReplicationFactor replicationFactor,
+      String name) {
+    Preconditions.checkNotNull(nodes);
+    Preconditions.checkArgument(nodes.size() > 0);
+    String leaderId = nodes.get(0).getUuidString();
+    PipelineChannel
+        pipelineChannel = new PipelineChannel(leaderId, state, replicationType,
+        replicationFactor, name);
+    for (DatanodeDetails node : nodes) {
+      pipelineChannel.addMember(node);
+    }
+    return pipelineChannel;
+  }
+
+  /**
+   * Create pluggable container placement policy implementation instance.
+   *
+   * @param nodeManager - SCM node manager.
+   * @param conf - configuration.
+   * @return SCM container placement policy implementation instance.
+   */
+  @SuppressWarnings("unchecked")
+  private static ContainerPlacementPolicy createContainerPlacementPolicy(
+      final NodeManager nodeManager, final Configuration conf) {
+    Class<? extends ContainerPlacementPolicy> implClass =
+        (Class<? extends ContainerPlacementPolicy>) conf.getClass(
+            ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+            SCMContainerPlacementRandom.class);
+
+    try {
+      Constructor<? extends ContainerPlacementPolicy> ctor =
+          implClass.getDeclaredConstructor(NodeManager.class,
+              Configuration.class);
+      return ctor.newInstance(nodeManager, conf);
+    } catch (RuntimeException e) {
+      throw e;
+    } catch (InvocationTargetException e) {
+      throw new RuntimeException(implClass.getName()
+          + " could not be constructed.", e.getCause());
+    } catch (Exception e) {
+      LOG.error("Unhandled exception occurred, Placement policy will not be " +
+          "functional.");
+      throw new IllegalArgumentException("Unable to load " +
+          "ContainerPlacementPolicy", e);
+    }
+  }
+
+  /**
+   * Return the pipeline manager from the replication type.
+   *
+   * @param replicationType - Replication Type Enum.
+   * @return pipeline Manager.
+   * @throws IllegalArgumentException if a new pipeline type is added
+   * and this function is not updated to handle it.
+   */
+  private PipelineManager getPipelineManager(ReplicationType replicationType)
+      throws IllegalArgumentException {
+    switch (replicationType) {
+    case RATIS:
+      return this.ratisManager;
+    case STAND_ALONE:
+      return this.standaloneManager;
+    case CHAINED:
+      throw new IllegalArgumentException("Not implemented yet");
+    default:
+      throw new IllegalArgumentException("Unexpected enum found. Does not" +
+          " know how to handle " + replicationType.toString());
+    }
+
+  }
+
+  /**
+   * This function is called by the Container Manager while allocating a new
+   * container. The client specifies what kind of replication pipeline is
+   * needed and, based on the replication type in the request, the appropriate
+   * interface is invoked.
+   */
+
+  public Pipeline getReplicationPipeline(ReplicationType replicationType,
+      HddsProtos.ReplicationFactor replicationFactor, String containerName)
+      throws IOException {
+    PipelineManager manager = getPipelineManager(replicationType);
+    Preconditions.checkNotNull(manager, "Found invalid pipeline manager");
+    LOG.debug("Getting replication pipeline for {} : Replication {}",
+        containerName, replicationFactor.toString());
+    return manager.
+        getPipeline(containerName, replicationFactor, replicationType);
+  }
+
+  /**
+   * Creates a pipeline from a specified set of Nodes.
+   */
+
+  public void createPipeline(ReplicationType replicationType, String
+      pipelineID, List<DatanodeDetails> datanodes) throws IOException {
+    PipelineManager manager = getPipelineManager(replicationType);
+    Preconditions.checkNotNull(manager, "Found invalid pipeline manager");
+    LOG.debug("Creating a pipeline: {} with nodes:{}", pipelineID,
+        datanodes.stream().map(DatanodeDetails::toString)
+            .collect(Collectors.joining(",")));
+    manager.createPipeline(pipelineID, datanodes);
+  }
+
+  /**
+   * Close the pipeline with the given pipelineID.
+   */
+
+  public void closePipeline(ReplicationType replicationType, String
+      pipelineID) throws IOException {
+    PipelineManager manager = getPipelineManager(replicationType);
+    Preconditions.checkNotNull(manager, "Found invalid pipeline manager");
+    LOG.debug("Closing pipeline. pipelineID: {}", pipelineID);
+    manager.closePipeline(pipelineID);
+  }
+
+  /**
+   * List the members of the pipeline.
+   */
+
+  public List<DatanodeDetails> getDatanodes(ReplicationType replicationType,
+      String pipelineID) throws IOException {
+    PipelineManager manager = getPipelineManager(replicationType);
+    Preconditions.checkNotNull(manager, "Found invalid pipeline manager");
+    LOG.debug("Getting data nodes from pipeline : {}", pipelineID);
+    return manager.getMembers(pipelineID);
+  }
+
+  /**
+   * Update the datanode list of the pipeline.
+   */
+
+  public void updateDatanodes(ReplicationType replicationType, String
+      pipelineID, List<DatanodeDetails> newDatanodes) throws IOException {
+    PipelineManager manager = getPipelineManager(replicationType);
+    Preconditions.checkNotNull(manager, "Found invalid pipeline manager");
+    LOG.debug("Updating pipeline: {} with new nodes:{}", pipelineID,
+        newDatanodes.stream().map(DatanodeDetails::toString)
+            .collect(Collectors.joining(",")));
+    manager.updatePipeline(pipelineID, newDatanodes);
+  }
+}
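The reflection-based plug-in loading in createContainerPlacementPolicy is a pattern worth calling out. The sketch below (illustration only, not part of the patch; Policy, Settings and RandomPolicy are hypothetical stand-ins) shows the same idea in a self-contained form: resolve an implementation class, find a constructor with a known signature, and unwrap InvocationTargetException so the real cause surfaces.

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

/** Self-contained illustration of the pluggable-factory pattern. */
public final class PluggableFactorySketch {

  /** Minimal stand-ins for the policy interface and its dependency. */
  public interface Policy { String name(); }
  public static final class Settings { }

  /** A sample implementation with the expected constructor signature. */
  public static final class RandomPolicy implements Policy {
    public RandomPolicy(Settings settings) { }
    @Override
    public String name() { return "random"; }
  }

  static Policy create(Class<? extends Policy> implClass, Settings settings) {
    try {
      Constructor<? extends Policy> ctor =
          implClass.getDeclaredConstructor(Settings.class);
      return ctor.newInstance(settings);
    } catch (InvocationTargetException e) {
      // The constructor itself threw; report its cause, not the wrapper.
      throw new RuntimeException(
          implClass.getName() + " could not be constructed.", e.getCause());
    } catch (ReflectiveOperationException e) {
      throw new IllegalArgumentException(
          "Unable to load " + implClass.getName(), e);
    }
  }

  public static void main(String[] args) {
    System.out.println(create(RandomPolicy.class, new Settings()).name());
  }
}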
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java
new file mode 100644
index 0000000..ea24c58
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/package-info.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines;
+/**
+ Ozone supports the notion of different kinds of pipelines.
+ That means we can have a replication pipeline built on
+ Ratis, Standalone or some other protocol. All pipeline managers,
+ the entities in charge of pipelines, reside in this package.
+
+ Here is the high-level architecture.
+
+ 1. A pipeline selector class is instantiated in the container manager class.
+
+ 2. A client, when creating a container, specifies what kind of replication
+ type it wants to use. We support two types now, Ratis and StandAlone.
+
+ 3. Based on the replication type, the pipeline selector class asks the
+ corresponding pipeline manager for a pipeline.
+
+ 4. Clients may specify the set of nodes in the pipeline, or rely on the
+ pipeline manager to select the datanodes if they are not specified.
+ */
\ No newline at end of file
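To make steps 1-4 above concrete, here is a minimal sketch (illustration only, not part of the patch) of how SCM-side code might ask the selector for a Ratis pipeline. The NodeManager instance is assumed to be supplied by the StorageContainerManager that owns the selector, and "container1" is a placeholder container name.

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.node.NodeManager;
import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector;

import java.io.IOException;

/** Hypothetical caller of PipelineSelector, for illustration only. */
public final class PipelineSelectionSketch {
  private PipelineSelectionSketch() { }

  public static Pipeline requestPipeline(NodeManager nodeManager)
      throws IOException {
    OzoneConfiguration conf = new OzoneConfiguration();
    PipelineSelector selector = new PipelineSelector(nodeManager, conf);
    // Dispatches to RatisManagerImpl and returns a three-node pipeline,
    // or null if no free nodes or open channels are available.
    return selector.getReplicationPipeline(
        ReplicationType.RATIS, ReplicationFactor.THREE, "container1");
  }
}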
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java
new file mode 100644
index 0000000..089a137
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/RatisManagerImpl.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines.ratis;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.XceiverClientRatis;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineManager;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Implementation of {@link PipelineManager}.
+ *
+ * TODO: Introduce a state machine.
+ */
+public class RatisManagerImpl extends PipelineManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RatisManagerImpl.class);
+  private static final String PREFIX = "Ratis-";
+  private final Configuration conf;
+  private final NodeManager nodeManager;
+  private final Set<DatanodeDetails> ratisMembers;
+
+  /**
+   * Constructs a Ratis Pipeline Manager.
+   *
+   * @param nodeManager - Node Manager.
+   */
+  public RatisManagerImpl(NodeManager nodeManager,
+      ContainerPlacementPolicy placementPolicy, long size, Configuration conf) {
+    super();
+    this.conf = conf;
+    this.nodeManager = nodeManager;
+    ratisMembers = new HashSet<>();
+  }
+
+  /**
+   * Allocates a new ratis PipelineChannel from the free nodes.
+   *
+   * @param factor - One or Three
+   * @return PipelineChannel.
+   */
+  public PipelineChannel allocatePipelineChannel(ReplicationFactor factor) {
+    List<DatanodeDetails> newNodesList = new LinkedList<>();
+    List<DatanodeDetails> datanodes = nodeManager.getNodes(NodeState.HEALTHY);
+    int count = getReplicationCount(factor);
+    // TODO: Add Raft state to the nodes so that we can query the datanodes
+    // and skip nodes already in a pipeline instead of maintaining a set.
+    for (DatanodeDetails datanode : datanodes) {
+      Preconditions.checkNotNull(datanode);
+      if (!ratisMembers.contains(datanode)) {
+        newNodesList.add(datanode);
+        if (newNodesList.size() == count) {
+          // once a datanode has been added to a pipeline, exclude it from
+          // further allocations
+          ratisMembers.addAll(newNodesList);
+          LOG.info("Allocating a new ratis pipelineChannel of size: {}", count);
+          // Start all channel names with the "Ratis" prefix so logs are
+          // easy to grep.
+          String conduitName = PREFIX +
+              UUID.randomUUID().toString().substring(PREFIX.length());
+          PipelineChannel pipelineChannel =
+              PipelineSelector.newPipelineFromNodes(newNodesList,
+              LifeCycleState.OPEN, ReplicationType.RATIS, factor, conduitName);
+          Pipeline pipeline =
+              new Pipeline("setup", pipelineChannel);
+          try (XceiverClientRatis client =
+              XceiverClientRatis.newXceiverClientRatis(pipeline, conf)) {
+            client.createPipeline(pipeline.getPipelineName(), newNodesList);
+          } catch (IOException e) {
+            LOG.error("Failed to create Ratis pipeline on the datanodes.", e);
+            return null;
+          }
+          return pipelineChannel;
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Creates a pipeline from a specified set of Nodes.
+   *
+   * @param pipelineID - Name of the pipeline
+   * @param datanodes - The list of datanodes that make this pipeline.
+   */
+  @Override
+  public void createPipeline(String pipelineID,
+                             List<DatanodeDetails> datanodes) {
+
+  }
+
+  /**
+   * Close the pipeline with the given pipelineID.
+   *
+   * @param pipelineID
+   */
+  @Override
+  public void closePipeline(String pipelineID) throws IOException {
+
+  }
+
+  /**
+   * List the members of the pipeline.
+   *
+   * @param pipelineID - Name of the pipeline
+   * @return the list of datanodes in the pipeline
+   */
+  @Override
+  public List<DatanodeDetails> getMembers(String pipelineID)
+      throws IOException {
+    return null;
+  }
+
+  /**
+   * Update the datanode list of the pipeline.
+   *
+   * @param pipelineID
+   * @param newDatanodes
+   */
+  @Override
+  public void updatePipeline(String pipelineID,
+                             List<DatanodeDetails> newDatanodes)
+      throws IOException {
+
+  }
+}
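For reviewers, the nested loop in allocatePipelineChannel above boils down to "pick the first N healthy nodes not already serving a pipeline, then mark them as used". The generic sketch below (illustration only, not part of the patch) shows that selection logic in isolation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/** Illustration of the exclusion-set selection used by the managers. */
public final class ExclusionAllocationSketch {

  static <T> List<T> pickUnused(List<T> healthy, Set<T> inUse, int count) {
    List<T> picked = new ArrayList<>();
    for (T node : healthy) {
      if (!inUse.contains(node)) {
        picked.add(node);
        if (picked.size() == count) {
          // Mirror the manager: mark the chosen nodes as used so later
          // allocations exclude them.
          inUse.addAll(picked);
          return picked;
        }
      }
    }
    return null; // not enough free nodes, like the manager returning null
  }

  public static void main(String[] args) {
    Set<String> used = new HashSet<>(Arrays.asList("dn1"));
    System.out.println(
        pickUnused(Arrays.asList("dn1", "dn2", "dn3", "dn4"), used, 3));
  }
}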
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java
new file mode 100644
index 0000000..2970fb3
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/ratis/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines.ratis;
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java
new file mode 100644
index 0000000..8268329
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/StandaloneManagerImpl.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines.standalone;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineManager;
+import org.apache.hadoop.hdds.scm.pipelines.PipelineSelector;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Standalone manager implementation that proves the pluggable pipeline
+ * interface works with the current tests.
+ */
+public class StandaloneManagerImpl extends PipelineManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StandaloneManagerImpl.class);
+  private final NodeManager nodeManager;
+  private final ContainerPlacementPolicy placementPolicy;
+  private final long containerSize;
+  private final Set<DatanodeDetails> standAloneMembers;
+
+  /**
+   * Constructor for Standalone Node Manager Impl.
+   * @param nodeManager - Node Manager.
+   * @param placementPolicy - Placement Policy
+   * @param containerSize - Container Size.
+   */
+  public StandaloneManagerImpl(NodeManager nodeManager,
+      ContainerPlacementPolicy placementPolicy, long containerSize) {
+    super();
+    this.nodeManager = nodeManager;
+    this.placementPolicy = placementPolicy;
+    this.containerSize =  containerSize;
+    this.standAloneMembers = new HashSet<>();
+  }
+
+
+  /**
+   * Allocates a new standalone PipelineChannel from the free nodes.
+   *
+   * @param factor - One
+   * @return PipelineChannel.
+   */
+  public PipelineChannel allocatePipelineChannel(ReplicationFactor factor) {
+    List<DatanodeDetails> newNodesList = new LinkedList<>();
+    List<DatanodeDetails> datanodes = nodeManager.getNodes(NodeState.HEALTHY);
+    int count = getReplicationCount(factor);
+    for (DatanodeDetails datanode : datanodes) {
+      Preconditions.checkNotNull(datanode);
+      if (!standAloneMembers.contains(datanode)) {
+        newNodesList.add(datanode);
+        if (newNodesList.size() == count) {
+          // once a datanode has been added to a pipeline, exclude it from
+          // further allocations
+          standAloneMembers.addAll(newNodesList);
+          LOG.info("Allocating a new standalone pipeline channel of size: {}",
+              count);
+          String channelName =
+              "SA-" + UUID.randomUUID().toString().substring(3);
+          return PipelineSelector.newPipelineFromNodes(newNodesList,
+              LifeCycleState.OPEN, ReplicationType.STAND_ALONE,
+              ReplicationFactor.ONE, channelName);
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Creates a pipeline from a specified set of Nodes.
+   *
+   * @param pipelineID - Name of the pipeline
+   * @param datanodes - The list of datanodes that make this pipeline.
+   */
+  @Override
+  public void createPipeline(String pipelineID,
+                             List<DatanodeDetails> datanodes) {
+    //return newPipelineFromNodes(datanodes, pipelineID);
+  }
+
+  /**
+   * Close the pipeline with the given pipelineID.
+   *
+   * @param pipelineID
+   */
+  @Override
+  public void closePipeline(String pipelineID) throws IOException {
+
+  }
+
+  /**
+   * List the members of the pipeline.
+   *
+   * @param pipelineID - Name of the pipeline
+   * @return the list of datanodes in the pipeline
+   */
+  @Override
+  public List<DatanodeDetails> getMembers(String pipelineID)
+      throws IOException {
+    return null;
+  }
+
+  /**
+   * Update the datanode list of the pipeline.
+   *
+   * @param pipelineID
+   * @param newDatanodes
+   */
+  @Override
+  public void updatePipeline(String pipelineID, List<DatanodeDetails>
+      newDatanodes) throws IOException {
+
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java
new file mode 100644
index 0000000..b2c3ca40
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/standalone/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.pipelines.standalone;
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java
new file mode 100644
index 0000000..4944017
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ratis/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.ratis;
+
+/**
+ * This package contains classes related to Apache Ratis for SCM.
+ */
diff --git a/hadoop-hdds/server-scm/src/main/webapps/scm/index.html b/hadoop-hdds/server-scm/src/main/webapps/scm/index.html
new file mode 100644
index 0000000..3407f51
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/webapps/scm/index.html
@@ -0,0 +1,76 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html lang="en">
+<head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
+    <meta name="description" content="HDFS Storage Container Manager">
+
+    <title>HDFS Storage Container Manager</title>
+
+    <link href="static/bootstrap-3.0.2/css/bootstrap.min.css" rel="stylesheet">
+    <link href="static/hadoop.css" rel="stylesheet">
+    <link href="static/nvd3-1.8.5.min.css" rel="stylesheet">
+
+    <link href="static/ozone.css" rel="stylesheet">
+
+</head>
+
+<body ng-app="scm">
+
+<header class="navbar navbar-inverse navbar-fixed-top bs-docs-nav">
+    <div class="container-fluid">
+        <div class="navbar-header">
+            <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar"
+                    aria-expanded="false" aria-controls="navbar">
+                <span class="sr-only">Toggle navigation</span>
+                <span class="icon-bar"></span>
+                <span class="icon-bar"></span>
+                <span class="icon-bar"></span>
+            </button>
+            <a class="navbar-brand" href="#">HDFS SCM</a>
+        </div>
+
+
+        <navmenu
+                metrics="{ 'Rpc metrics' : '#!/metrics/rpc'}"></navmenu>
+
+
+    </div>
+</header>
+
+<div class="container-fluid" style="margin: 12pt">
+
+    <ng-view></ng-view>
+
+</div><!-- /.container -->
+
+<script src="static/jquery-1.10.2.min.js"></script>
+<script src="static/angular-1.6.4.min.js"></script>
+<script src="static/angular-route-1.6.4.min.js"></script>
+<script src="static/d3-3.5.17.min.js"></script>
+<script src="static/nvd3-1.8.5.min.js"></script>
+<script src="static/angular-nvd3-1.0.9.min.js"></script>
+<script src="static/ozone.js"></script>
+<script src="scm.js"></script>
+<script src="static/bootstrap-3.0.2/js/bootstrap.min.js"></script>
+</body>
+</html>
diff --git a/hadoop-hdds/server-scm/src/main/webapps/scm/main.html b/hadoop-hdds/server-scm/src/main/webapps/scm/main.html
new file mode 100644
index 0000000..2666f81
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/webapps/scm/main.html
@@ -0,0 +1,20 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+  -->
+<overview>
+    <scm-overview>
+    </scm-overview>
+</overview>
diff --git a/hadoop-hdds/server-scm/src/main/webapps/scm/scm-overview.html b/hadoop-hdds/server-scm/src/main/webapps/scm/scm-overview.html
new file mode 100644
index 0000000..fca23ba
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/webapps/scm/scm-overview.html
@@ -0,0 +1,60 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<h2>Node counts</h2>
+
+<table class="table table-bordered table-striped" class="col-md-6">
+    <tbody>
+    <tr ng-repeat="typestat in $ctrl.nodemanagermetrics.NodeCount | orderBy:'key':false:$ctrl.nodeOrder">
+        <td>{{typestat.key}}</td>
+        <td>{{typestat.value}}</td>
+    </tr>
+    </tbody>
+</table>
+
+<h2>Status</h2>
+<table class="table table-bordered table-striped" class="col-md-6">
+    <tbody>
+    <tr>
+        <td>Client Rpc port</td>
+        <td>{{$ctrl.overview.jmx.ClientRpcPort}}</td>
+    </tr>
+    <tr>
+        <td>Datanode Rpc port</td>
+        <td>{{$ctrl.overview.jmx.DatanodeRpcPort}}</td>
+    </tr>
+    <tr>
+        <td>Block Manager: Open containers</td>
+        <td>{{$ctrl.blockmanagermetrics.OpenContainersNo}}</td>
+    </tr>
+    <tr>
+        <td>Node Manager: Minimum chill mode nodes</td>
+        <td>{{$ctrl.nodemanagermetrics.MinimumChillModeNodes}}</td>
+    </tr>
+    <tr>
+        <td>Node Manager: Out-of-node chill mode</td>
+        <td>{{$ctrl.nodemanagermetrics.OutOfNodeChillMode}}</td>
+    </tr>
+    <tr>
+        <td>Node Manager: Chill mode status</td>
+        <td>{{$ctrl.nodemanagermetrics.ChillModeStatus}}</td>
+    </tr>
+    <tr>
+        <td>Node Manager: Manual chill mode</td>
+        <td>{{$ctrl.nodemanagermetrics.InManualChillMode}}</td>
+    </tr>
+    </tbody>
+</table>
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/main/webapps/scm/scm.js b/hadoop-hdds/server-scm/src/main/webapps/scm/scm.js
new file mode 100644
index 0000000..bcfa8b7
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/webapps/scm/scm.js
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+    "use strict";
+    angular.module('scm', ['ozone', 'nvd3']);
+
+    angular.module('scm').component('scmOverview', {
+        templateUrl: 'scm-overview.html',
+        require: {
+            overview: "^overview"
+        },
+        controller: function ($http) {
+            var ctrl = this;
+            $http.get("jmx?qry=Hadoop:service=BlockManager,name=*")
+                .then(function (result) {
+                    ctrl.blockmanagermetrics = result.data.beans[0];
+                });
+            $http.get("jmx?qry=Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo")
+                .then(function (result) {
+                    ctrl.nodemanagermetrics = result.data.beans[0];
+                });
+
+            var statusSortOrder = {
+                "HEALTHY": "a",
+                "STALE": "b",
+                "DEAD": "c",
+                "UNKNOWN": "z",
+                "DECOMMISSIONING": "x",
+                "DECOMMISSIONED": "y"
+            };
+            ctrl.nodeOrder = function (v1, v2) {
+                // Statuses without a defined sort order compare as "undefined".
+                return ("" + statusSortOrder[v1.value]).localeCompare("" + statusSortOrder[v2.value]);
+            };
+
+        }
+    });
+
+})();
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java
new file mode 100644
index 0000000..6e01e53
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/HddsServerUtilTest.java
@@ -0,0 +1,308 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.Timeout;
+
+import java.net.InetSocketAddress;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+
+import static org.apache.hadoop.hdds.HddsUtils.getSCMAddresses;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test the HDDS server side utilities.
+ */
+public class HddsServerUtilTest {
+
+  @Rule
+  public Timeout timeout = new Timeout(300000);
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  /**
+   * Verify DataNode endpoint lookup failure if neither the client nor
+   * datanode endpoint are configured.
+   */
+  @Test
+  public void testMissingScmDataNodeAddress() {
+    final Configuration conf = new OzoneConfiguration();
+    thrown.expect(IllegalArgumentException.class);
+    HddsServerUtil.getScmAddressForDataNodes(conf);
+  }
+
+  /**
+   * Verify that the datanode endpoint is parsed correctly.
+   * This tests the logic used by the DataNodes to determine which address
+   * to connect to.
+   */
+  @Test
+  public void testGetScmDataNodeAddress() {
+    final Configuration conf = new OzoneConfiguration();
+
+    // First try a client address with just a host name. Verify it falls
+    // back to the default port.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4");
+    InetSocketAddress addr = HddsServerUtil.getScmAddressForDataNodes(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    // Next try a client address with just a host name and port.
+    // Verify the port is ignored and the default DataNode port is used.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    addr = HddsServerUtil.getScmAddressForDataNodes(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    // Set both OZONE_SCM_CLIENT_ADDRESS_KEY and
+    // OZONE_SCM_DATANODE_ADDRESS_KEY.
+    // Verify that the latter overrides and the port number is still the
+    // default.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "5.6.7.8");
+    addr =
+        HddsServerUtil.getScmAddressForDataNodes(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    // Set both OZONE_SCM_CLIENT_ADDRESS_KEY and
+    // OZONE_SCM_DATANODE_ADDRESS_KEY.
+    // Verify that the latter overrides and the port number from the latter is
+    // used.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "5.6.7.8:200");
+    addr = HddsServerUtil.getScmAddressForDataNodes(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(200));
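+
+    // In summary: OZONE_SCM_DATANODE_ADDRESS_KEY wins for the host, falling
+    // back to the host from OZONE_SCM_CLIENT_ADDRESS_KEY, and the datanode
+    // default port is used unless OZONE_SCM_DATANODE_ADDRESS_KEY carries an
+    // explicit port.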
+  }
+
+  /**
+   * Verify that the client endpoint bind address is computed correctly.
+   * This tests the logic used by the SCM to determine its own bind address.
+   */
+  @Test
+  public void testScmClientBindHostDefault() {
+    final Configuration conf = new OzoneConfiguration();
+
+    // The bind host should be 0.0.0.0 unless OZONE_SCM_CLIENT_BIND_HOST_KEY
+    // is set differently.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4");
+    InetSocketAddress addr = HddsServerUtil.getScmClientBindAddress(conf);
+    assertThat(addr.getHostString(), is("0.0.0.0"));
+    assertThat(addr.getPort(), is(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT));
+
+    // The bind host should be 0.0.0.0 unless OZONE_SCM_CLIENT_BIND_HOST_KEY
+    // is set differently. The port number from OZONE_SCM_CLIENT_ADDRESS_KEY
+    // should be respected.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200");
+    addr = HddsServerUtil.getScmClientBindAddress(conf);
+    assertThat(addr.getHostString(), is("0.0.0.0"));
+    assertThat(addr.getPort(), is(100));
+
+    // OZONE_SCM_CLIENT_BIND_HOST_KEY should be respected.
+    // Port number should be default if none is specified via
+    // OZONE_SCM_DATANODE_ADDRESS_KEY.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4");
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY, "5.6.7.8");
+    addr = HddsServerUtil.getScmClientBindAddress(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT));
+
+    // OZONE_SCM_CLIENT_BIND_HOST_KEY should be respected.
+    // Port number from OZONE_SCM_CLIENT_ADDRESS_KEY should be
+    // respected.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200");
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY, "5.6.7.8");
+    addr = HddsServerUtil.getScmClientBindAddress(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(100));
+  }
+
+  /**
+   * Verify that the DataNode endpoint bind address is computed correctly.
+   * This tests the logic used by the SCM to determine its own bind address.
+   */
+  @Test
+  public void testScmDataNodeBindHostDefault() {
+    final Configuration conf = new OzoneConfiguration();
+
+    // The bind host should be 0.0.0.0 unless OZONE_SCM_DATANODE_BIND_HOST_KEY
+    // is set differently.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4");
+    InetSocketAddress addr = HddsServerUtil.getScmDataNodeBindAddress(conf);
+    assertThat(addr.getHostString(), is("0.0.0.0"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    // The bind host should be 0.0.0.0 unless OZONE_SCM_DATANODE_BIND_HOST_KEY
+    // is set differently. The port number from OZONE_SCM_DATANODE_ADDRESS_KEY
+    // should be respected.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200");
+    addr = HddsServerUtil.getScmDataNodeBindAddress(conf);
+    assertThat(addr.getHostString(), is("0.0.0.0"));
+    assertThat(addr.getPort(), is(200));
+
+    // OZONE_SCM_DATANODE_BIND_HOST_KEY should be respected.
+    // Port number should be default if none is specified via
+    // OZONE_SCM_DATANODE_ADDRESS_KEY.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY, "5.6.7.8");
+    addr = HddsServerUtil.getScmDataNodeBindAddress(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(
+        ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT));
+
+    // OZONE_SCM_DATANODE_BIND_HOST_KEY should be respected.
+    // Port number from OZONE_SCM_DATANODE_ADDRESS_KEY should be
+    // respected.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "1.2.3.4:200");
+    conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY, "5.6.7.8");
+    addr = HddsServerUtil.getScmDataNodeBindAddress(conf);
+    assertThat(addr.getHostString(), is("5.6.7.8"));
+    assertThat(addr.getPort(), is(200));
+  }
+
+  @Test
+  public void testGetSCMAddresses() {
+    final Configuration conf = new OzoneConfiguration();
+    Collection<InetSocketAddress> addresses = null;
+    InetSocketAddress addr = null;
+    Iterator<InetSocketAddress> it = null;
+
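+    // The values below exercise OZONE_SCM_NAMES as a comma-separated list
+    // of host or host:port entries; entries without a port are expected to
+    // fall back to OZONE_SCM_DEFAULT_PORT, and surrounding whitespace is
+    // expected to be tolerated.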
+    // Verify valid IP address setup
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "1.2.3.4");
+    addresses = getSCMAddresses(conf);
+    assertThat(addresses.size(), is(1));
+    addr = addresses.iterator().next();
+    assertThat(addr.getHostName(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(ScmConfigKeys.OZONE_SCM_DEFAULT_PORT));
+
+    // Verify valid hostname setup
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "scm1");
+    addresses = getSCMAddresses(conf);
+    assertThat(addresses.size(), is(1));
+    addr = addresses.iterator().next();
+    assertThat(addr.getHostName(), is("scm1"));
+    assertThat(addr.getPort(), is(ScmConfigKeys.OZONE_SCM_DEFAULT_PORT));
+
+    // Verify valid hostname and port
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "scm1:1234");
+    addresses = getSCMAddresses(conf);
+    assertThat(addresses.size(), is(1));
+    addr = addresses.iterator().next();
+    assertThat(addr.getHostName(), is("scm1"));
+    assertThat(addr.getPort(), is(1234));
+
+    final HashMap<String, Integer> hostsAndPorts = new HashMap<>();
+    hostsAndPorts.put("scm1", 1234);
+    hostsAndPorts.put("scm2", 2345);
+    hostsAndPorts.put("scm3", 3456);
+
+    // Verify multiple hosts and port
+    conf.setStrings(
+        ScmConfigKeys.OZONE_SCM_NAMES, "scm1:1234,scm2:2345,scm3:3456");
+    addresses = getSCMAddresses(conf);
+    assertThat(addresses.size(), is(3));
+    it = addresses.iterator();
+    HashMap<String, Integer> expected1 = new HashMap<>(hostsAndPorts);
+    while (it.hasNext()) {
+      InetSocketAddress current = it.next();
+      assertTrue(expected1.remove(current.getHostName(),
+          current.getPort()));
+    }
+    assertTrue(expected1.isEmpty());
+
+    // Verify names with spaces
+    conf.setStrings(
+        ScmConfigKeys.OZONE_SCM_NAMES, " scm1:1234, scm2:2345 , scm3:3456 ");
+    addresses = getSCMAddresses(conf);
+    assertThat(addresses.size(), is(3));
+    it = addresses.iterator();
+    HashMap<String, Integer> expected2 = new HashMap<>(hostsAndPorts);
+    while (it.hasNext()) {
+      InetSocketAddress current = it.next();
+      assertTrue(expected2.remove(current.getHostName(),
+          current.getPort()));
+    }
+    assertTrue(expected2.isEmpty());
+
+    // Verify empty value
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "");
+    try {
+      addresses = getSCMAddresses(conf);
+      fail("Empty value should cause an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+
+    // Verify invalid hostname
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "s..x..:1234");
+    try {
+      addresses = getSCMAddresses(conf);
+      fail("An invalid hostname should cause an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+
+    // Verify invalid port
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "scm:xyz");
+    try {
+      addresses = getSCMAddresses(conf);
+      fail("An invalid port should cause an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+
+    // Verify a mixed case (both valid and invalid values appear)
+    conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, "scm1:1234, scm:xyz");
+    try {
+      addresses = getSCMAddresses(conf);
+      fail("An invalid value should cause an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java
new file mode 100644
index 0000000..5ca9dd7
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestStorageContainerManagerHttpServer.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.web.URLConnectionFactory;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * Test the SCM HTTP server with the various HTTP policy options.
+ */
+@RunWith(value = Parameterized.class)
+public class TestStorageContainerManagerHttpServer {
+  private static final String BASEDIR = GenericTestUtils
+      .getTempPath(TestStorageContainerManagerHttpServer.class.getSimpleName());
+  private static String keystoresDir;
+  private static String sslConfDir;
+  private static Configuration conf;
+  private static URLConnectionFactory connectionFactory;
+
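+  /**
+   * Each policy returned here produces a separate run of every test in this
+   * class, courtesy of the JUnit Parameterized runner.
+   */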
+  @Parameters public static Collection<Object[]> policy() {
+    Object[][] params = new Object[][] {
+        {HttpConfig.Policy.HTTP_ONLY},
+        {HttpConfig.Policy.HTTPS_ONLY},
+        {HttpConfig.Policy.HTTP_AND_HTTPS} };
+    return Arrays.asList(params);
+  }
+
+  private final HttpConfig.Policy policy;
+
+  public TestStorageContainerManagerHttpServer(Policy policy) {
+    super();
+    this.policy = policy;
+  }
+
+  @BeforeClass public static void setUp() throws Exception {
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    conf = new Configuration();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(
+        TestStorageContainerManagerHttpServer.class);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+    connectionFactory =
+        URLConnectionFactory.newDefaultURLConnectionFactory(conf);
+    conf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getClientSSLConfigFileName());
+    conf.set(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getServerSSLConfigFileName());
+  }
+
+  @AfterClass public static void tearDown() throws Exception {
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+  }
+
+  @Test public void testHttpPolicy() throws Exception {
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
+    conf.set(ScmConfigKeys.OZONE_SCM_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    StorageContainerManagerHttpServer server = null;
+    try {
+      server = new StorageContainerManagerHttpServer(conf);
+      server.start();
+
+      Assert.assertTrue(implies(policy.isHttpEnabled(),
+          canAccess("http", server.getHttpAddress())));
+      Assert.assertTrue(
+          implies(!policy.isHttpEnabled(), server.getHttpAddress() == null));
+
+      Assert.assertTrue(implies(policy.isHttpsEnabled(),
+          canAccess("https", server.getHttpsAddress())));
+      Assert.assertTrue(
+          implies(!policy.isHttpsEnabled(), server.getHttpsAddress() == null));
+
+    } finally {
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+
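+  /**
+   * Attempts to fetch the /jmx endpoint over the given scheme and returns
+   * false on any failure (including a null address).
+   */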
+  private static boolean canAccess(String scheme, InetSocketAddress addr) {
+    if (addr == null) {
+      return false;
+    }
+    try {
+      URL url =
+          new URL(scheme + "://" + NetUtils.getHostPortString(addr) + "/jmx");
+      URLConnection conn = connectionFactory.openConnection(url);
+      conn.connect();
+      conn.getContent();
+    } catch (Exception e) {
+      return false;
+    }
+    return true;
+  }
+
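+  /**
+   * Logical implication: returns true unless {@code a} holds and {@code b}
+   * does not.
+   */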
+  private static boolean implies(boolean a, boolean b) {
+    return !a || b;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
new file mode 100644
index 0000000..40d8a69
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm;
+
+import org.apache.hadoop.hdds.scm.node.SCMNodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.UUID;
+
+/**
+ * Stateless helper functions for handling SCM/datanode connections.
+ */
+public final class TestUtils {
+
+  private TestUtils() {
+  }
+
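+  /**
+   * Creates a DatanodeDetails with a random UUID and registers it with the
+   * given node manager.
+   */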
+  public static DatanodeDetails getDatanodeDetails(SCMNodeManager nodeManager) {
+
+    return getDatanodeDetails(nodeManager, UUID.randomUUID().toString());
+  }
+
+  /**
+   * Creates a new DatanodeDetails with the given node ID and registers it
+   * with the supplied node manager.
+   *
+   * @param nodeManager - node manager to register the datanode with.
+   * @param uuid - node ID, generally a UUID string.
+   * @return DatanodeDetails.
+   */
+  public static DatanodeDetails getDatanodeDetails(SCMNodeManager nodeManager,
+      String uuid) {
+    DatanodeDetails datanodeDetails = getDatanodeDetails(uuid);
+    nodeManager.register(datanodeDetails.getProtoBufMessage());
+    return datanodeDetails;
+  }
+
+  /**
+   * Creates the specified number of DatanodeDetails and registers them with
+   * the node manager.
+   *
+   * @param nodeManager - node manager to register the datanode IDs with.
+   * @param count       - number of DatanodeDetails needed.
+   * @return list of registered DatanodeDetails.
+   */
+  public static List<DatanodeDetails> getListOfRegisteredDatanodeDetails(
+      SCMNodeManager nodeManager, int count) {
+    ArrayList<DatanodeDetails> datanodes = new ArrayList<>();
+    for (int i = 0; i < count; i++) {
+      datanodes.add(getDatanodeDetails(nodeManager));
+    }
+    return datanodes;
+  }
+
+  /**
+   * Gets a DatanodeDetails with a randomly generated UUID.
+   *
+   * @return DatanodeDetails.
+   */
+  public static DatanodeDetails getDatanodeDetails() {
+    return getDatanodeDetails(UUID.randomUUID().toString());
+  }
+
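+  /**
+   * Builds a DatanodeDetails with the given UUID, a random IPv4 address,
+   * the hostname "localhost" and all ports set to 0.
+   */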
+  private static DatanodeDetails getDatanodeDetails(String uuid) {
+    Random random = new Random();
+    String ipAddress =
+        random.nextInt(256) + "." + random.nextInt(256) + "." + random
+            .nextInt(256) + "." + random.nextInt(256);
+
+    DatanodeDetails.Builder builder = DatanodeDetails.newBuilder();
+    builder.setUuid(uuid)
+        .setHostName("localhost")
+        .setIpAddress(ipAddress)
+        .setContainerPort(0)
+        .setRatisPort(0)
+        .setOzoneRestPort(0);
+    return builder.build();
+  }
+
+  /**
+   * Gets a list with the specified number of DatanodeDetails.
+   *
+   * @param count - number of DatanodeDetails needed.
+   * @return list of DatanodeDetails.
+   */
+  public static List<DatanodeDetails> getListOfDatanodeDetails(int count) {
+    ArrayList<DatanodeDetails> datanodes = new ArrayList<>();
+    for (int i = 0; i < count; i++) {
+      datanodes.add(getDatanodeDetails());
+    }
+    return datanodes;
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
new file mode 100644
index 0000000..0eff702
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.block;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.MockNodeManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.SCMTestUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.UUID;
+
+import static org.apache.hadoop.ozone.OzoneConsts.GB;
+import static org.apache.hadoop.ozone.OzoneConsts.MB;
+
+
+/**
+ * Tests for SCM Block Manager.
+ */
+public class TestBlockManager {
+  private static ContainerMapping mapping;
+  private static MockNodeManager nodeManager;
+  private static BlockManagerImpl blockManager;
+  private static File testDir;
+  private static final long DEFAULT_BLOCK_SIZE = 128 * MB;
+  private static HddsProtos.ReplicationFactor factor;
+  private static HddsProtos.ReplicationType type;
+  private static String containerOwner = "OZONE";
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+
+    String path = GenericTestUtils
+        .getTempPath(TestBlockManager.class.getSimpleName());
+
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, path);
+    testDir = Paths.get(path).toFile();
+    boolean folderExisted = testDir.exists() || testDir.mkdirs();
+    if (!folderExisted) {
+      throw new IOException("Unable to create test directory path");
+    }
+    nodeManager = new MockNodeManager(true, 10);
+    mapping = new ContainerMapping(conf, nodeManager, 128);
+    blockManager = new BlockManagerImpl(conf, nodeManager, mapping, 128);
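+    // Pick replication settings that match the cluster configuration:
+    // Ratis with factor THREE when Ratis is enabled, standalone with
+    // factor ONE otherwise.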
+    if (conf.getBoolean(ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT)) {
+      factor = HddsProtos.ReplicationFactor.THREE;
+      type = HddsProtos.ReplicationType.RATIS;
+    } else {
+      factor = HddsProtos.ReplicationFactor.ONE;
+      type = HddsProtos.ReplicationType.STAND_ALONE;
+    }
+  }
+
+  @AfterClass
+  public static void cleanup() throws IOException {
+    blockManager.close();
+    mapping.close();
+    FileUtil.fullyDelete(testDir);
+  }
+
+  @Before
+  public void clearChillMode() {
+    nodeManager.setChillmode(false);
+  }
+
+  @Test
+  public void testAllocateBlock() throws Exception {
+    AllocatedBlock block = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE,
+        type, factor, containerOwner);
+    Assert.assertNotNull(block);
+  }
+
+  @Test
+  public void testGetAllocatedBlock() throws IOException {
+    AllocatedBlock block = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE,
+        type, factor, containerOwner);
+    Assert.assertNotNull(block);
+    Pipeline pipeline = blockManager.getBlock(block.getKey());
+    Assert.assertEquals(pipeline.getLeader().getUuid(),
+        block.getPipeline().getLeader().getUuid());
+  }
+
+  @Test
+  public void testDeleteBlock() throws Exception {
+    AllocatedBlock block = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE,
+        type, factor, containerOwner);
+    Assert.assertNotNull(block);
+    blockManager.deleteBlocks(Collections.singletonList(block.getKey()));
+
+    // A deleted block cannot be retrieved.
+    thrown.expectMessage("Specified block key does not exist.");
+    blockManager.getBlock(block.getKey());
+
+    // Tombstone of the deleted block can be retrieved if it has not been
+    // cleaned yet.
+    String deletedKeyName = blockManager.getDeletedKeyName(block.getKey());
+    Pipeline pipeline = blockManager.getBlock(deletedKeyName);
+    Assert.assertEquals(pipeline.getLeader().getUuid(),
+        block.getPipeline().getLeader().getUuid());
+  }
+
+  @Test
+  public void testAllocateOversizedBlock() throws IOException {
+    long size = 6 * GB;
+    thrown.expectMessage("Unsupported block size");
+    blockManager.allocateBlock(size, type, factor, containerOwner);
+  }
+
+  @Test
+  public void testGetNoneExistentContainer() throws IOException {
+    String nonExistBlockKey = UUID.randomUUID().toString();
+    thrown.expectMessage("Specified block key does not exist.");
+    blockManager.getBlock(nonExistBlockKey);
+  }
+
+  @Test
+  public void testChillModeAllocateBlockFails() throws IOException {
+    nodeManager.setChillmode(true);
+    thrown.expectMessage("Unable to create block while in chill mode");
+    blockManager.allocateBlock(DEFAULT_BLOCK_SIZE,
+        type, factor, containerOwner);
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
new file mode 100644
index 0000000..77030cd
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
@@ -0,0 +1,363 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.block;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.Mapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.utils.MetadataStore;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_BLOCK_DELETION_MAX_RETRY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.mockito.Mockito.mock;
+
+/**
+ * Tests for DeletedBlockLog.
+ */
+public class TestDeletedBlockLog {
+
+  private static DeletedBlockLogImpl deletedBlockLog;
+  private OzoneConfiguration conf;
+  private File testDir;
+
+  @Before
+  public void setup() throws Exception {
+    testDir = GenericTestUtils.getTestDir(
+        TestDeletedBlockLog.class.getSimpleName());
+    conf = new OzoneConfiguration();
+    conf.setInt(OZONE_SCM_BLOCK_DELETION_MAX_RETRY, 20);
+    conf.set(OZONE_METADATA_DIRS, testDir.getAbsolutePath());
+    deletedBlockLog = new DeletedBlockLogImpl(conf);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    deletedBlockLog.close();
+    FileUtils.deleteDirectory(testDir);
+  }
+
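+  /**
+   * Generates dataSize containers with random names, each mapped to between
+   * 1 and 30 randomly named blocks.
+   */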
+  private Map<String, List<String>> generateData(int dataSize) {
+    Map<String, List<String>> blockMap = new HashMap<>();
+    Random random = new Random(1);
+    for (int i = 0; i < dataSize; i++) {
+      String containerName = "container-" + UUID.randomUUID().toString();
+      List<String> blocks = new ArrayList<>();
+      int blockSize = random.nextInt(30) + 1;
+      for (int j = 0; j < blockSize; j++)  {
+        blocks.add("block-" + UUID.randomUUID().toString());
+      }
+      blockMap.put(containerName, blocks);
+    }
+    return blockMap;
+  }
+
+  @Test
+  public void testGetTransactions() throws Exception {
+    List<DeletedBlocksTransaction> blocks =
+        deletedBlockLog.getTransactions(30);
+    Assert.assertEquals(0, blocks.size());
+
+    // Creates 40 TX in the log.
+    for (Map.Entry<String, List<String>> entry : generateData(40).entrySet()){
+      deletedBlockLog.addTransaction(entry.getKey(), entry.getValue());
+    }
+
+    // Get first 30 TXs.
+    blocks = deletedBlockLog.getTransactions(30);
+    Assert.assertEquals(30, blocks.size());
+    for (int i = 0; i < 30; i++) {
+      Assert.assertEquals(i + 1, blocks.get(i).getTxID());
+    }
+
+    // Get another 30 TXs.
+    // The log only has 10 TXs left, so this call returns just those 10.
+    blocks = deletedBlockLog.getTransactions(30);
+    Assert.assertEquals(10, blocks.size());
+    for (int i = 30; i < 40; i++) {
+      Assert.assertEquals(i + 1, blocks.get(i - 30).getTxID());
+    }
+
+    // Get another 50 TXs.
+    // By now the read position should have moved back to the beginning,
+    // so this call will return all 40 TXs.
+    blocks = deletedBlockLog.getTransactions(50);
+    Assert.assertEquals(40, blocks.size());
+    for (int i = 0; i < 40; i++) {
+      Assert.assertEquals(i + 1, blocks.get(i).getTxID());
+    }
+    List<Long> txIDs = new ArrayList<>();
+    for (DeletedBlocksTransaction block : blocks) {
+      txIDs.add(block.getTxID());
+    }
+    deletedBlockLog.commitTransactions(txIDs);
+  }
+
+  @Test
+  public void testIncrementCount() throws Exception {
+    int maxRetry = conf.getInt(OZONE_SCM_BLOCK_DELETION_MAX_RETRY, 20);
+
+    // Create 30 TXs in the log.
+    for (Map.Entry<String, List<String>> entry : generateData(30).entrySet()){
+      deletedBlockLog.addTransaction(entry.getKey(), entry.getValue());
+    }
+
+    // This will return all TXs, total num 30.
+    List<DeletedBlocksTransaction> blocks =
+        deletedBlockLog.getTransactions(40);
+    List<Long> txIDs = blocks.stream().map(DeletedBlocksTransaction::getTxID)
+        .collect(Collectors.toList());
+
+    for (int i = 0; i < maxRetry; i++) {
+      deletedBlockLog.incrementCount(txIDs);
+    }
+
+    // Increment one more time so the count exceeds maxRetry.
+    // On this call, the count is set to -1, which marks the TX as
+    // permanently failed.
+    deletedBlockLog.incrementCount(txIDs);
+    blocks = deletedBlockLog.getTransactions(40);
+    for (DeletedBlocksTransaction block : blocks) {
+      Assert.assertEquals(-1, block.getCount());
+    }
+
+    // Once all TXs have failed, getTransactions always returns nothing.
+    blocks = deletedBlockLog.getTransactions(40);
+    Assert.assertEquals(blocks.size(), 0);
+  }
+
+  @Test
+  public void testCommitTransactions() throws Exception {
+    for (Map.Entry<String, List<String>> entry : generateData(50).entrySet()){
+      deletedBlockLog.addTransaction(entry.getKey(), entry.getValue());
+    }
+    List<DeletedBlocksTransaction> blocks =
+        deletedBlockLog.getTransactions(20);
+    List<Long> txIDs = new ArrayList<>();
+    for (DeletedBlocksTransaction block : blocks) {
+      txIDs.add(block.getTxID());
+    }
+    // Add an invalid txID.
+    txIDs.add(70L);
+    deletedBlockLog.commitTransactions(txIDs);
+    blocks = deletedBlockLog.getTransactions(50);
+    Assert.assertEquals(30, blocks.size());
+  }
+
+  @Test
+  public void testRandomOperateTransactions() throws Exception {
+    Random random = new Random();
+    int added = 0, committed = 0;
+    List<DeletedBlocksTransaction> blocks = new ArrayList<>();
+    List<Long> txIDs = new ArrayList<>();
+    byte[] latestTxid = DFSUtil.string2Bytes("#LATEST_TXID#");
+    MetadataKeyFilters.MetadataKeyFilter avoidLatestTxid =
+        (preKey, currentKey, nextKey) ->
+            !Arrays.equals(latestTxid, currentKey);
+    MetadataStore store = deletedBlockLog.getDeletedStore();
+    // Randomly add/get/commit/increase transactions.
+    for (int i = 0; i < 100; i++) {
+      int state = random.nextInt(4);
+      if (state == 0) {
+        for (Map.Entry<String, List<String>> entry :
+            generateData(10).entrySet()){
+          deletedBlockLog.addTransaction(entry.getKey(), entry.getValue());
+        }
+        added += 10;
+      } else if (state == 1) {
+        blocks = deletedBlockLog.getTransactions(20);
+        txIDs = new ArrayList<>();
+        for (DeletedBlocksTransaction block : blocks) {
+          txIDs.add(block.getTxID());
+        }
+        deletedBlockLog.incrementCount(txIDs);
+      } else if (state == 2) {
+        txIDs = new ArrayList<>();
+        for (DeletedBlocksTransaction block : blocks) {
+          txIDs.add(block.getTxID());
+        }
+        blocks = new ArrayList<>();
+        committed += txIDs.size();
+        deletedBlockLog.commitTransactions(txIDs);
+      } else {
+        // verify the number of added and committed.
+        List<Map.Entry<byte[], byte[]>> result =
+            store.getRangeKVs(null, added, avoidLatestTxid);
+        Assert.assertEquals(added, result.size() + committed);
+      }
+    }
+  }
+
+  @Test
+  public void testPersistence() throws Exception {
+    for (Map.Entry<String, List<String>> entry : generateData(50).entrySet()){
+      deletedBlockLog.addTransaction(entry.getKey(), entry.getValue());
+    }
+    // Close the DB and reopen it to make sure transactions
+    // are stored persistently.
+    deletedBlockLog.close();
+    deletedBlockLog = new DeletedBlockLogImpl(conf);
+    List<DeletedBlocksTransaction> blocks =
+        deletedBlockLog.getTransactions(10);
+    List<Long> txIDs = new ArrayList<>();
+    for (DeletedBlocksTransaction block : blocks) {
+      txIDs.add(block.getTxID());
+    }
+    deletedBlockLog.commitTransactions(txIDs);
+    blocks = deletedBlockLog.getTransactions(10);
+    Assert.assertEquals(10, blocks.size());
+  }
+
+  @Test
+  public void testDeletedBlockTransactions() throws IOException {
+    int txNum = 10;
+    int maximumAllowedTXNum = 5;
+    List<DeletedBlocksTransaction> blocks = null;
+    List<String> containerNames = new LinkedList<>();
+
+    int count = 0;
+    String containerName = null;
+    DatanodeDetails dnId1 = DatanodeDetails.newBuilder()
+        .setUuid(UUID.randomUUID().toString())
+        .setIpAddress("127.0.0.1")
+        .setHostName("localhost")
+        .setContainerPort(0)
+        .setRatisPort(0)
+        .setOzoneRestPort(0)
+        .build();
+    DatanodeDetails dnId2 = DatanodeDetails.newBuilder()
+        .setUuid(UUID.randomUUID().toString())
+        .setIpAddress("127.0.0.1")
+        .setHostName("localhost")
+        .setContainerPort(0)
+        .setRatisPort(0)
+        .setOzoneRestPort(0)
+        .build();
+    Mapping mappingService = mock(ContainerMapping.class);
+    // Create txNum TXs in the log.
+    for (Map.Entry<String, List<String>> entry : generateData(txNum)
+        .entrySet()) {
+      count++;
+      containerName = entry.getKey();
+      containerNames.add(containerName);
+      deletedBlockLog.addTransaction(containerName, entry.getValue());
+
+      // make TX[1-6] for datanode1; TX[7-10] for datanode2
+      if (count <= (maximumAllowedTXNum + 1)) {
+        mockContainerInfo(mappingService, containerName, dnId1);
+      } else {
+        mockContainerInfo(mappingService, containerName, dnId2);
+      }
+    }
+
+    DatanodeDeletedBlockTransactions transactions =
+        new DatanodeDeletedBlockTransactions(mappingService,
+            maximumAllowedTXNum, 2);
+    deletedBlockLog.getTransactions(transactions);
+
+    List<Long> txIDs = new LinkedList<>();
+    for (UUID id : transactions.getDatanodeIDs()) {
+      List<DeletedBlocksTransaction> txs = transactions
+          .getDatanodeTransactions(id);
+      for (DeletedBlocksTransaction tx : txs) {
+        txIDs.add(tx.getTxID());
+      }
+    }
+
+    // delete TX ID
+    deletedBlockLog.commitTransactions(txIDs);
+    blocks = deletedBlockLog.getTransactions(txNum);
+    // One transaction should remain, since dnId1 holds six TXs but only
+    // maximumAllowedTXNum (5) of them could be fetched and committed.
+    Assert.assertEquals(1, blocks.size());
+
+    Assert.assertFalse(transactions.isFull());
+    // The number of TXs for dnId1 must not exceed the maximum value.
+    Assert.assertEquals(maximumAllowedTXNum,
+        transactions.getDatanodeTransactions(dnId1.getUuid()).size());
+
+    int size = transactions.getDatanodeTransactions(dnId2.getUuid()).size();
+    // Adding a duplicated container for dnId2 should fail.
+    DeletedBlocksTransaction.Builder builder =
+        DeletedBlocksTransaction.newBuilder();
+    builder.setTxID(11);
+    builder.setContainerName(containerName);
+    builder.setCount(0);
+    transactions.addTransaction(builder.build());
+
+    // The number of TXs for dnId2 should not change.
+    Assert.assertEquals(size,
+        transactions.getDatanodeTransactions(dnId2.getUuid()).size());
+
+    // Add a new TX for dnId2, so dnId2 also reaches the maximum value.
+    containerName = "newContainer";
+    builder = DeletedBlocksTransaction.newBuilder();
+    builder.setTxID(12);
+    builder.setContainerName(containerName);
+    builder.setCount(0);
+    mockContainerInfo(mappingService, containerName, dnId2);
+    transactions.addTransaction(builder.build());
+    // Since all nodes are full, the transactions object is full.
+    Assert.assertTrue(transactions.isFull());
+  }
+
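+  /**
+   * Stubs the mocked Mapping so that getContainer(containerName) returns a
+   * ContainerInfo whose pipeline contains only the given datanode.
+   */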
+  private void mockContainerInfo(Mapping mappingService, String containerName,
+      DatanodeDetails dd) throws IOException {
+    PipelineChannel pipelineChannel =
+        new PipelineChannel("fake", LifeCycleState.OPEN,
+            ReplicationType.STAND_ALONE, ReplicationFactor.ONE, "fake");
+    pipelineChannel.addMember(dd);
+    Pipeline pipeline = new Pipeline(containerName, pipelineChannel);
+
+    ContainerInfo.Builder builder = new ContainerInfo.Builder();
+    builder.setPipeline(pipeline);
+
+    ContainerInfo containerInfo = builder.build();
+    Mockito.doReturn(containerInfo).when(mappingService)
+        .getContainer(containerName);
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
new file mode 100644
index 0000000..f38b6f9
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
@@ -0,0 +1,520 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.assertj.core.util.Preconditions;
+import org.mockito.Mockito;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdds.scm.TestUtils.getDatanodeDetails;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+
+/**
+ * A mock NodeManager used as a test helper for container mapping tests.
+ */
+public class MockNodeManager implements NodeManager {
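+  // Capacity/used pairs cycled through (index % NODES.length) when fake
+  // nodes are created; the last three entries seed two stale nodes and one
+  // dead node.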
+  private final static NodeData[] NODES = {
+      new NodeData(10L * OzoneConsts.TB, OzoneConsts.GB),
+      new NodeData(64L * OzoneConsts.TB, 100 * OzoneConsts.GB),
+      new NodeData(128L * OzoneConsts.TB, 256 * OzoneConsts.GB),
+      new NodeData(40L * OzoneConsts.TB, OzoneConsts.TB),
+      new NodeData(256L * OzoneConsts.TB, 200 * OzoneConsts.TB),
+      new NodeData(20L * OzoneConsts.TB, 10 * OzoneConsts.GB),
+      new NodeData(32L * OzoneConsts.TB, 16 * OzoneConsts.TB),
+      new NodeData(OzoneConsts.TB, 900 * OzoneConsts.GB),
+      new NodeData(OzoneConsts.TB, 900 * OzoneConsts.GB, NodeData.STALE),
+      new NodeData(OzoneConsts.TB, 200L * OzoneConsts.GB, NodeData.STALE),
+      new NodeData(OzoneConsts.TB, 200L * OzoneConsts.GB, NodeData.DEAD)
+  };
+  private final List<DatanodeDetails> healthyNodes;
+  private final List<DatanodeDetails> staleNodes;
+  private final List<DatanodeDetails> deadNodes;
+  private final Map<UUID, SCMNodeStat> nodeMetricMap;
+  private final SCMNodeStat aggregateStat;
+  private boolean chillmode;
+  private final Map<UUID, List<SCMCommand>> commandMap;
+
+  public MockNodeManager(boolean initializeFakeNodes, int nodeCount) {
+    this.healthyNodes = new LinkedList<>();
+    this.staleNodes = new LinkedList<>();
+    this.deadNodes = new LinkedList<>();
+    this.nodeMetricMap = new HashMap<>();
+    aggregateStat = new SCMNodeStat();
+    if (initializeFakeNodes) {
+      for (int x = 0; x < nodeCount; x++) {
+        DatanodeDetails dd = getDatanodeDetails();
+        populateNodeMetric(dd, x);
+      }
+    }
+    chillmode = false;
+    this.commandMap = new HashMap<>();
+  }
+
+  /**
+   * Invoked from the constructor to create some node metrics.
+   *
+   * @param datanodeDetails - Datanode details
+   * @param x - index used to pick a NodeData entry for this node.
+   */
+  private void populateNodeMetric(DatanodeDetails datanodeDetails, int x) {
+    SCMNodeStat newStat = new SCMNodeStat();
+    long remaining =
+        NODES[x % NODES.length].capacity - NODES[x % NODES.length].used;
+    newStat.set(
+        (NODES[x % NODES.length].capacity),
+        (NODES[x % NODES.length].used), remaining);
+    this.nodeMetricMap.put(datanodeDetails.getUuid(), newStat);
+    aggregateStat.add(newStat);
+
+    if (NODES[x % NODES.length].getCurrentState() == NodeData.HEALTHY) {
+      healthyNodes.add(datanodeDetails);
+    }
+
+    if (NODES[x % NODES.length].getCurrentState() == NodeData.STALE) {
+      staleNodes.add(datanodeDetails);
+    }
+
+    if (NODES[x % NODES.length].getCurrentState() == NodeData.DEAD) {
+      deadNodes.add(datanodeDetails);
+    }
+
+  }
+
+  /**
+   * Sets the chill mode value.
+   * @param chillmode boolean
+   */
+  public void setChillmode(boolean chillmode) {
+    this.chillmode = chillmode;
+  }
+
+  /**
+   * Removes a data node from the management of this Node Manager.
+   *
+   * @param node - DataNode.
+   * @throws UnregisteredNodeException
+   */
+  @Override
+  public void removeNode(DatanodeDetails node)
+      throws UnregisteredNodeException {
+
+  }
+
+  /**
+   * Gets all live Datanodes that are currently communicating with SCM.
+   *
+   * @param nodestate - State of the node
+   * @return List of Datanodes that are Heartbeating SCM.
+   */
+  @Override
+  public List<DatanodeDetails> getNodes(HddsProtos.NodeState nodestate) {
+    if (nodestate == HEALTHY) {
+      return healthyNodes;
+    }
+
+    if (nodestate == STALE) {
+      return staleNodes;
+    }
+
+    if (nodestate == DEAD) {
+      return deadNodes;
+    }
+
+    return null;
+  }
+
+  /**
+   * Returns the Number of Datanodes that are communicating with SCM.
+   *
+   * @param nodestate - State of the node
+   * @return int -- count
+   */
+  @Override
+  public int getNodeCount(HddsProtos.NodeState nodestate) {
+    List<DatanodeDetails> nodes = getNodes(nodestate);
+    if (nodes != null) {
+      return nodes.size();
+    }
+    return 0;
+  }
+
+  /**
+   * Get all datanodes known to SCM.
+   *
+   * @return List of DatanodeDetails known to SCM.
+   */
+  @Override
+  public List<DatanodeDetails> getAllNodes() {
+    return null;
+  }
+
+  /**
+   * Get the minimum number of nodes to get out of chill mode.
+   *
+   * @return int
+   */
+  @Override
+  public int getMinimumChillModeNodes() {
+    return 0;
+  }
+
+  /**
+   * Chill mode is the period when the node manager waits for a minimum
+   * configured number of datanodes to report in. It is called chill mode to
+   * indicate the period before the node manager gets into action.
+   * <p>
+   * Forcefully exits the chill mode, even if we have not met the minimum
+   * criteria of the nodes reporting in.
+   */
+  @Override
+  public void forceExitChillMode() {
+
+  }
+
+  /**
+   * Puts the node manager into manual chill mode.
+   */
+  @Override
+  public void enterChillMode() {
+
+  }
+
+  /**
+   * Brings node manager out of manual chill mode.
+   */
+  @Override
+  public void exitChillMode() {
+
+  }
+
+  /**
+   * Returns true if node manager is out of chill mode, else false.
+   * @return true if out of chill mode, else false
+   */
+  @Override
+  public boolean isOutOfChillMode() {
+    return !chillmode;
+  }
+
+  /**
+   * Returns a chill mode status string.
+   *
+   * @return String
+   */
+  @Override
+  public String getChillModeStatus() {
+    return null;
+  }
+
+  /**
+   * Returns the aggregated node stats.
+   * @return the aggregated node stats.
+   */
+  @Override
+  public SCMNodeStat getStats() {
+    return aggregateStat;
+  }
+
+  /**
+   * Return a map of nodes to their stats.
+   * @return a map of individual node stats (live/stale but not dead).
+   */
+  @Override
+  public Map<UUID, SCMNodeStat> getNodeStats() {
+    return nodeMetricMap;
+  }
+
+  /**
+   * Return the node stat of the specified datanode.
+   * @param datanodeDetails - datanode details.
+   * @return node stat if it is live/stale, null if it is dead or doesn't exist.
+   */
+  @Override
+  public SCMNodeMetric getNodeStat(DatanodeDetails datanodeDetails) {
+    return new SCMNodeMetric(nodeMetricMap.get(datanodeDetails.getUuid()));
+  }
+
+  @Override
+  public NodePoolManager getNodePoolManager() {
+    return Mockito.mock(NodePoolManager.class);
+  }
+
+  /**
+   * Used for testing.
+   *
+   * @return true if the HB check is done.
+   */
+  @Override
+  public boolean waitForHeartbeatProcessed() {
+    return false;
+  }
+
+  /**
+   * Returns the node state of a specific node.
+   *
+   * @param dd - DatanodeDetails
+   * @return Healthy/Stale/Dead.
+   */
+  @Override
+  public HddsProtos.NodeState getNodeState(DatanodeDetails dd) {
+    return null;
+  }
+
+  @Override
+  public void addDatanodeCommand(UUID dnId, SCMCommand command) {
+    if (commandMap.containsKey(dnId)) {
+      List<SCMCommand> commandList = commandMap.get(dnId);
+      Preconditions.checkNotNull(commandList);
+      commandList.add(command);
+    } else {
+      List<SCMCommand> commandList = new LinkedList<>();
+      commandList.add(command);
+      commandMap.put(dnId, commandList);
+    }
+  }
+
+  // Returns the number of commands queued for the given datanode.
+  public int getCommandCount(DatanodeDetails dd) {
+    List<SCMCommand> list = commandMap.get(dd.getUuid());
+    return (list == null) ? 0 : list.size();
+  }
+
+  public void clearCommandQueue(UUID dnId) {
+    if (commandMap.containsKey(dnId)) {
+      commandMap.put(dnId, new LinkedList<>());
+    }
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated with it. If
+   * the stream is already closed then invoking this method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the close may
+   * fail require careful attention. It is strongly advised to relinquish the
+   * underlying resources and to internally <em>mark</em> the {@code Closeable}
+   * as closed, prior to throwing the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+
+  }
+
+  /**
+   * When an object implementing interface <code>Runnable</code> is used to
+   * create a thread, starting the thread causes the object's <code>run</code>
+   * method to be called in that separately executing thread.
+   * <p>
+   * The general contract of the method <code>run</code> is that it may take any
+   * action whatsoever.
+   *
+   * @see Thread#run()
+   */
+  @Override
+  public void run() {
+
+  }
+
+  /**
+   * Gets the version info from SCM.
+   *
+   * @param versionRequest - version Request.
+   * @return - SCM version info and other information required by the
+   * datanode.
+   */
+  @Override
+  public VersionResponse getVersion(SCMVersionRequestProto versionRequest) {
+    return null;
+  }
+
+  /**
+   * Register the node if the node finds that it is not registered with any
+   * SCM.
+   *
+   * @param datanodeDetails DatanodeDetailsProto
+   * @return SCMCommand
+   */
+  @Override
+  public SCMCommand register(HddsProtos.DatanodeDetailsProto datanodeDetails) {
+    return null;
+  }
+
+  /**
+   * Send heartbeat to indicate the datanode is alive and doing well.
+   *
+   * @param datanodeDetails - Datanode details.
+   * @param nodeReport - node report.
+   * @param containerReportState - container report state.
+   * @return list of SCMCommands sent in response to the heartbeat.
+   */
+  @Override
+  public List<SCMCommand> sendHeartbeat(
+      HddsProtos.DatanodeDetailsProto datanodeDetails,
+      SCMNodeReport nodeReport, ReportState containerReportState) {
+    if ((datanodeDetails != null) && (nodeReport != null) && (nodeReport
+        .getStorageReportCount() > 0)) {
+      SCMNodeStat stat = this.nodeMetricMap.get(datanodeDetails.getUuid());
+
+      long totalCapacity = 0L;
+      long totalRemaining = 0L;
+      long totalScmUsed = 0L;
+      List<SCMStorageReport> storageReports = nodeReport.getStorageReportList();
+      for (SCMStorageReport report : storageReports) {
+        totalCapacity += report.getCapacity();
+        totalRemaining += report.getRemaining();
+        totalScmUsed += report.getScmUsed();
+      }
+      aggregateStat.subtract(stat);
+      stat.set(totalCapacity, totalScmUsed, totalRemaining);
+      aggregateStat.add(stat);
+      nodeMetricMap.put(DatanodeDetails
+          .getFromProtoBuf(datanodeDetails).getUuid(), stat);
+
+    }
+    return null;
+  }
+
+  @Override
+  public Map<String, Integer> getNodeCount() {
+    Map<String, Integer> nodeCountMap = new HashMap<String, Integer>();
+    for (HddsProtos.NodeState state : HddsProtos.NodeState.values()) {
+      nodeCountMap.put(state.toString(), getNodeCount(state));
+    }
+    return nodeCountMap;
+  }
+
+  /**
+   * Makes it easy to add a container.
+   *
+   * @param datanodeDetails datanode details
+   * @param size number of bytes.
+   */
+  public void addContainer(DatanodeDetails datanodeDetails, long size) {
+    SCMNodeStat stat = this.nodeMetricMap.get(datanodeDetails.getUuid());
+    if (stat != null) {
+      aggregateStat.subtract(stat);
+      stat.getCapacity().add(size);
+      aggregateStat.add(stat);
+      nodeMetricMap.put(datanodeDetails.getUuid(), stat);
+    }
+  }
+
+  /**
+   * Makes it easy to simulate a delete of a container.
+   *
+   * @param datanodeDetails datanode Details
+   * @param size number of bytes.
+   */
+  public void delContainer(DatanodeDetails datanodeDetails, long size) {
+    SCMNodeStat stat = this.nodeMetricMap.get(datanodeDetails.getUuid());
+    if (stat != null) {
+      aggregateStat.subtract(stat);
+      stat.getCapacity().subtract(size);
+      aggregateStat.add(stat);
+      nodeMetricMap.put(datanodeDetails.getUuid(), stat);
+    }
+  }
+
+  /**
+   * Holds capacity/usage values and a health state used to seed the
+   * mock nodes.
+   */
+  private static class NodeData {
+    public static final long HEALTHY = 1;
+    public static final long STALE = 2;
+    public static final long DEAD = 3;
+
+    private long capacity;
+    private long used;
+
+    private long currentState;
+
+    /**
+     * By default nodes are healthy.
+     * @param capacity capacity in bytes.
+     * @param used used space in bytes.
+     */
+    NodeData(long capacity, long used) {
+      this(capacity, used, HEALTHY);
+    }
+
+    /**
+     * Constructs a nodeDefinition.
+     *
+     * @param capacity capacity.
+     * @param used used.
+     * @param currentState - HEALTHY, STALE or DEAD.
+     */
+    NodeData(long capacity, long used, long currentState) {
+      this.capacity = capacity;
+      this.used = used;
+      this.currentState = currentState;
+    }
+
+    public long getCapacity() {
+      return capacity;
+    }
+
+    public void setCapacity(long capacity) {
+      this.capacity = capacity;
+    }
+
+    public long getUsed() {
+      return used;
+    }
+
+    public void setUsed(long used) {
+      this.used = used;
+    }
+
+    public long getCurrentState() {
+      return currentState;
+    }
+
+    public void setCurrentState(long currentState) {
+      this.currentState = currentState;
+    }
+
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
new file mode 100644
index 0000000..200a611
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
@@ -0,0 +1,333 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.SCMTestUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.NavigableSet;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Tests for Container Mapping.
+ */
+public class TestContainerMapping {
+  private static ContainerMapping mapping;
+  private static MockNodeManager nodeManager;
+  private static File testDir;
+  private static XceiverClientManager xceiverClientManager;
+  private static String containerOwner = "OZONE";
+
+  private static final long TIMEOUT = 10000;
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+
+    testDir = GenericTestUtils
+        .getTestDir(TestContainerMapping.class.getSimpleName());
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        testDir.getAbsolutePath());
+    conf.setTimeDuration(
+        ScmConfigKeys.OZONE_SCM_CONTAINER_CREATION_LEASE_TIMEOUT,
+        TIMEOUT,
+        TimeUnit.MILLISECONDS);
+    boolean folderExisted = testDir.exists() || testDir.mkdirs();
+    if (!folderExisted) {
+      throw new IOException("Unable to create test directory path");
+    }
+    nodeManager = new MockNodeManager(true, 10);
+    mapping = new ContainerMapping(conf, nodeManager, 128);
+    xceiverClientManager = new XceiverClientManager(conf);
+  }
+
+  @AfterClass
+  public static void cleanup() throws IOException {
+    if(mapping != null) {
+      mapping.close();
+    }
+    FileUtil.fullyDelete(testDir);
+  }
+
+  @Before
+  public void clearChillMode() {
+    nodeManager.setChillmode(false);
+  }
+
+  @Test
+  public void testAllocateContainer() throws Exception {
+    ContainerInfo containerInfo = mapping.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(),
+        UUID.randomUUID().toString(), containerOwner);
+    Assert.assertNotNull(containerInfo);
+  }
+
+  @Test
+  public void testAllocateContainerDistributesAllocation() throws Exception {
+    /* Ideally we would assert on the allocation distribution itself, for
+    example with a z-score check that rules out 3-sigma events, but that is
+    more machinery than this test needs. Instead we simply check that more
+    than 5 of the 10 datanodes were picked as pipeline leaders across the
+    30 allocations.
+     */
+    Set<UUID> pipelineList = new TreeSet<>();
+    for (int x = 0; x < 30; x++) {
+      ContainerInfo containerInfo = mapping.allocateContainer(
+          xceiverClientManager.getType(),
+          xceiverClientManager.getFactor(),
+          UUID.randomUUID().toString(), containerOwner);
+
+      Assert.assertNotNull(containerInfo);
+      Assert.assertNotNull(containerInfo.getPipeline());
+      pipelineList.add(containerInfo.getPipeline().getLeader()
+          .getUuid());
+    }
+    Assert.assertTrue(pipelineList.size() > 5);
+  }
+
+  @Test
+  public void testGetContainer() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    Pipeline pipeline = mapping.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName,
+        containerOwner).getPipeline();
+    Assert.assertNotNull(pipeline);
+    Pipeline newPipeline = mapping.getContainer(containerName).getPipeline();
+    Assert.assertEquals(pipeline.getLeader().getUuid(),
+        newPipeline.getLeader().getUuid());
+  }
+
+  @Test
+  public void testDuplicateAllocateContainerFails() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    Pipeline pipeline = mapping.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName,
+        containerOwner).getPipeline();
+    Assert.assertNotNull(pipeline);
+    thrown.expectMessage("Specified container already exists.");
+    mapping.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName,
+        containerOwner);
+  }
+
+  @Test
+  public void testGetNonExistentContainer() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    thrown.expectMessage("Specified key does not exist.");
+    mapping.getContainer(containerName);
+  }
+
+  @Test
+  public void testChillModeAllocateContainerFails() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    nodeManager.setChillmode(true);
+    thrown.expectMessage("Unable to create container while in chill mode");
+    mapping.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName,
+        containerOwner);
+  }
+
+  @Test
+  public void testContainerCreationLeaseTimeout() throws IOException,
+      InterruptedException {
+    String containerName = UUID.randomUUID().toString();
+    nodeManager.setChillmode(false);
+    ContainerInfo containerInfo = mapping.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(),
+        containerName,
+        containerOwner);
+    mapping.updateContainerState(containerInfo.getContainerName(),
+        HddsProtos.LifeCycleEvent.CREATE);
+    Thread.sleep(TIMEOUT + 1000);
+
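+    // The creation lease should have expired by now, so SCM is expected to
+    // have moved this container into the DELETING state, and the CREATED
+    // transition below should fail with a lease exception.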
+    NavigableSet<ContainerID> deleteContainers = mapping.getStateManager()
+        .getMatchingContainerIDs(
+            "OZONE",
+            xceiverClientManager.getType(),
+            xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.DELETING);
+    Assert.assertTrue(deleteContainers.contains(containerInfo.containerID()));
+
+    thrown.expect(IOException.class);
+    thrown.expectMessage("Lease Exception");
+    mapping.updateContainerState(containerInfo.getContainerName(),
+        HddsProtos.LifeCycleEvent.CREATED);
+  }
+
+  @Test
+  public void testFullContainerReport() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    ContainerInfo info = createContainer(containerName);
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+    ContainerReportsRequestProto.reportType reportType =
+        ContainerReportsRequestProto.reportType.fullReport;
+    List<StorageContainerDatanodeProtocolProtos.ContainerInfo> reports =
+        new ArrayList<>();
+    StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder =
+        StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder();
+    ciBuilder.setContainerName(containerName)
+        //setting some random hash
+        .setFinalhash("e16cc9d6024365750ed8dbd194ea46d2")
+        .setSize(5368709120L)
+        .setUsed(2000000000L)
+        .setKeyCount(100000000L)
+        .setReadCount(100000000L)
+        .setWriteCount(100000000L)
+        .setReadBytes(2000000000L)
+        .setWriteBytes(2000000000L)
+        .setContainerID(info.getContainerID());
+
+    reports.add(ciBuilder.build());
+
+    ContainerReportsRequestProto.Builder crBuilder =
+        ContainerReportsRequestProto.newBuilder();
+    crBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage())
+        .setType(reportType).addAllReports(reports);
+
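+    // Processing this report should refresh SCM's view of the container so
+    // that the key count and used bytes asserted below match the report.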
+    mapping.processContainerReports(crBuilder.build());
+
+    ContainerInfo updatedContainer = mapping.getContainer(containerName);
+    Assert.assertEquals(100000000L, updatedContainer.getNumberOfKeys());
+    Assert.assertEquals(2000000000L, updatedContainer.getUsedBytes());
+  }
+
+  @Test
+  public void testContainerCloseWithContainerReport() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    ContainerInfo info = createContainer(containerName);
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+    ContainerReportsRequestProto.reportType reportType =
+        ContainerReportsRequestProto.reportType.fullReport;
+    List<StorageContainerDatanodeProtocolProtos.ContainerInfo> reports =
+        new ArrayList<>();
+
+    StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder =
+        StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder();
+    ciBuilder.setContainerName(containerName)
+        //setting some random hash
+        .setFinalhash("7c45eb4d7ed5e0d2e89aaab7759de02e")
+        .setSize(5368709120L)
+        .setUsed(5368705120L)
+        .setKeyCount(500000000L)
+        .setReadCount(500000000L)
+        .setWriteCount(500000000L)
+        .setReadBytes(5368705120L)
+        .setWriteBytes(5368705120L)
+        .setContainerID(info.getContainerID());
+
+    reports.add(ciBuilder.build());
+
+    ContainerReportsRequestProto.Builder crBuilder =
+        ContainerReportsRequestProto.newBuilder();
+    crBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage())
+        .setType(reportType).addAllReports(reports);
+
+    mapping.processContainerReports(crBuilder.build());
+
+    ContainerInfo updatedContainer = mapping.getContainer(containerName);
+    Assert.assertEquals(500000000L, updatedContainer.getNumberOfKeys());
+    Assert.assertEquals(5368705120L, updatedContainer.getUsedBytes());
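+    // The reported usage is nearly equal to the reported container size, so
+    // the container is expected to be queued for closing (CLOSING state).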
+    NavigableSet<ContainerID> pendingCloseContainers = mapping.getStateManager()
+        .getMatchingContainerIDs(
+            containerOwner,
+            xceiverClientManager.getType(),
+            xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.CLOSING);
+    Assert.assertTrue(
+         pendingCloseContainers.contains(updatedContainer.containerID()));
+  }
+
+  @Test
+  public void testCloseContainer() throws IOException {
+    String containerName = UUID.randomUUID().toString();
+    ContainerInfo info = createContainer(containerName);
+    mapping.updateContainerState(containerName,
+        HddsProtos.LifeCycleEvent.FINALIZE);
+    NavigableSet<ContainerID> pendingCloseContainers = mapping.getStateManager()
+        .getMatchingContainerIDs(
+            containerOwner,
+            xceiverClientManager.getType(),
+            xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.CLOSING);
+    Assert.assertTrue(pendingCloseContainers.contains(info.containerID()));
+    mapping.updateContainerState(containerName,
+        HddsProtos.LifeCycleEvent.CLOSE);
+    NavigableSet<ContainerID> closeContainers = mapping.getStateManager()
+        .getMatchingContainerIDs(
+            containerOwner,
+            xceiverClientManager.getType(),
+            xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.CLOSED);
+    Assert.assertTrue(closeContainers.contains(info.containerID()));
+  }
+
+  /**
+   * Creates a container with the given name in ContainerMapping.
+   * @param containerName
+   *          Name of the container
+   * @return ContainerInfo for the newly allocated container
+   * @throws IOException
+   */
+  private ContainerInfo createContainer(String containerName)
+      throws IOException {
+    nodeManager.setChillmode(false);
+    ContainerInfo containerInfo = mapping.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(),
+        containerName,
+        containerOwner);
+    mapping.updateContainerState(containerInfo.getContainerName(),
+        HddsProtos.LifeCycleEvent.CREATE);
+    mapping.updateContainerState(containerInfo.getContainerName(),
+        HddsProtos.LifeCycleEvent.CREATED);
+    return containerInfo;
+  }
+
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java
new file mode 100644
index 0000000..2fec232
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/closer/TestContainerCloser.java
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container.closer;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.MockNodeManager;
+import org.apache.hadoop.hdds.scm.container.TestContainerMapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.common.SCMTestUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_GB;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent
+    .CREATE;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent
+    .CREATED;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_CONTAINER_REPORT_INTERVAL;
+
+/**
+ * Test class for Closing Container.
+ */
+public class TestContainerCloser {
+
+  private static final long GIGABYTE = 1024L * 1024L * 1024L;
+  private static Configuration configuration;
+  private static MockNodeManager nodeManager;
+  private static ContainerMapping mapping;
+  private static long size;
+  private static File testDir;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    configuration = SCMTestUtils.getConf();
+    size = configuration.getLong(OZONE_SCM_CONTAINER_SIZE_GB,
+        OZONE_SCM_CONTAINER_SIZE_DEFAULT) * 1024 * 1024 * 1024;
+    configuration.setTimeDuration(OZONE_CONTAINER_REPORT_INTERVAL,
+        1, TimeUnit.SECONDS);
+    testDir = GenericTestUtils
+        .getTestDir(TestContainerMapping.class.getSimpleName());
+    configuration.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        testDir.getAbsolutePath());
+    nodeManager = new MockNodeManager(true, 10);
+    mapping = new ContainerMapping(configuration, nodeManager, 128);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    if (mapping != null) {
+      mapping.close();
+    }
+    FileUtil.fullyDelete(testDir);
+  }
+
+  @Test
+  public void testClose() throws IOException {
+    String containerName = "container-" + RandomStringUtils.randomNumeric(5);
+
+    ContainerInfo info = mapping.allocateContainer(
+        HddsProtos.ReplicationType.STAND_ALONE,
+        HddsProtos.ReplicationFactor.ONE, containerName, "ozone");
+
+    //Execute these state transitions so that we can close the container.
+    mapping.updateContainerState(containerName, CREATE);
+    mapping.updateContainerState(containerName, CREATED);
+    long currentCount = mapping.getCloser().getCloseCount();
+    long runCount = mapping.getCloser().getThreadRunCount();
+
+    DatanodeDetails datanode = info.getPipeline().getLeader();
+    // Send a container report with used set to 1 GB. This should not
+    // trigger a close.
+    sendContainerReport(info, 1 * GIGABYTE);
+
+    // With only one container, the cleaner thread should not have run.
+    Assert.assertEquals(0, mapping.getCloser().getThreadRunCount());
+
+    // With only 1 GB, the container should not be queued for closing.
+    Assert.assertEquals(0, mapping.getCloser().getCloseCount());
+
+    // Assert that the Close command was not queued for this Datanode.
+    Assert.assertEquals(0, nodeManager.getCommandCount(datanode));
+
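+    // Now report usage at roughly 91% of the container size; this should
+    // cross the close threshold and queue exactly one close command.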
+    long newUsed = (long) (size * 0.91f);
+    sendContainerReport(info, newUsed);
+
+    // The cleaner thread still should not have run.
+    Assert.assertEquals(runCount, mapping.getCloser().getThreadRunCount());
+
+    // The close count should have increased by exactly one.
+    Assert.assertEquals(1,
+        mapping.getCloser().getCloseCount() - currentCount);
+
+    // Assert that the Close command was Queued for this Datanode.
+    Assert.assertEquals(1, nodeManager.getCommandCount(datanode));
+  }
+
+  @Test
+  public void testRepeatedClose() throws IOException,
+      InterruptedException {
+    // This test asserts that if we queue more than one report, the second
+    // report is discarded by the system when it lands within the
+    // 3 * report-frequency window.
+
+    configuration.setTimeDuration(OZONE_CONTAINER_REPORT_INTERVAL, 1,
+        TimeUnit.SECONDS);
+    String containerName = "container-" + RandomStringUtils.randomNumeric(5);
+
+    ContainerInfo info = mapping.allocateContainer(
+        HddsProtos.ReplicationType.STAND_ALONE,
+        HddsProtos.ReplicationFactor.ONE, containerName, "ozone");
+
+    //Execute these state transitions so that we can close the container.
+    mapping.updateContainerState(containerName, CREATE);
+
+    long currentCount = mapping.getCloser().getCloseCount();
+    long runCount = mapping.getCloser().getThreadRunCount();
+
+
+    DatanodeDetails datanodeDetails = info.getPipeline().getLeader();
+
+    // Send this report twice and assert that only one close command is
+    // queued.
+    sendContainerReport(info, 5 * GIGABYTE);
+    sendContainerReport(info, 5 * GIGABYTE);
+
+    // Assert that the Close command was Queued for this Datanode.
+    Assert.assertEquals(1,
+        nodeManager.getCommandCount(datanodeDetails));
+    // And close count will be one.
+    Assert.assertEquals(1,
+        mapping.getCloser().getCloseCount() - currentCount);
+    Thread.sleep(TimeUnit.SECONDS.toMillis(4));
+
+    // Send another report; the system will now queue another close command.
+    sendContainerReport(info, 5 * GIGABYTE);
+    Assert.assertEquals(2,
+        nodeManager.getCommandCount(datanodeDetails));
+    // But the close count will still be one, since from the closer's point
+    // of view we are closing only one container even if we have sent
+    // multiple close commands to the datanode.
+    Assert.assertEquals(1, mapping.getCloser().getCloseCount()
+        - currentCount);
+  }
+
+  @Test
+  public void testCleanupThreadRuns() throws IOException,
+      InterruptedException {
+    // This test asserts that the cleanup thread runs once the number of
+    // closed containers goes above the cleanup watermark.
+
+    long runCount = mapping.getCloser().getThreadRunCount();
+
+    for (int x = 0; x < ContainerCloser.getCleanupWaterMark() + 10; x++) {
+      String containerName = "container-" + RandomStringUtils.randomNumeric(7);
+      ContainerInfo info = mapping.allocateContainer(
+          HddsProtos.ReplicationType.STAND_ALONE,
+          HddsProtos.ReplicationFactor.ONE, containerName, "ozone");
+      mapping.updateContainerState(containerName, CREATE);
+      mapping.updateContainerState(containerName, CREATED);
+      sendContainerReport(info, 5 * GIGABYTE);
+    }
+
+    Thread.sleep(TimeUnit.SECONDS.toMillis(1));
+
+    // Assert that cleanup thread ran at least once.
+    Assert.assertTrue(mapping.getCloser().getThreadRunCount() - runCount > 0);
+  }
+
+  private void sendContainerReport(ContainerInfo info, long used) throws
+      IOException {
+    ContainerReportsRequestProto.Builder
+        reports =  ContainerReportsRequestProto.newBuilder();
+    reports.setType(ContainerReportsRequestProto.reportType.fullReport);
+
+    StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder =
+        StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder();
+    ciBuilder.setContainerName(info.getContainerName())
+        .setFinalhash("e16cc9d6024365750ed8dbd194ea46d2")
+        .setSize(size)
+        .setUsed(used)
+        .setKeyCount(100000000L)
+        .setReadCount(100000000L)
+        .setWriteCount(100000000L)
+        .setReadBytes(2000000000L)
+        .setWriteBytes(2000000000L)
+        .setContainerID(1L);
+    reports.setDatanodeDetails(
+        TestUtils.getDatanodeDetails().getProtoBufMessage());
+    reports.addReports(ciBuilder);
+    mapping.processContainerReports(reports.build());
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java
new file mode 100644
index 0000000..63cc9bf
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/states/TestContainerAttribute.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.scm.container.states;
+
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test ContainerAttribute management.
+ */
+public class TestContainerAttribute {
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Test
+  public void testInsert() throws SCMException {
+    ContainerAttribute<Integer> containerAttribute = new ContainerAttribute<>();
+    ContainerID id = new ContainerID(42);
+    containerAttribute.insert(1, id);
+    Assert.assertEquals(1,
+        containerAttribute.getCollection(1).size());
+    Assert.assertTrue(containerAttribute.getCollection(1).contains(id));
+
+    // Insert again and verify that it overwrites an existing value.
+    ContainerID newId =
+        new ContainerID(42);
+    containerAttribute.insert(1, newId);
+    Assert.assertEquals(1,
+        containerAttribute.getCollection(1).size());
+    Assert.assertTrue(containerAttribute.getCollection(1).contains(newId));
+  }
+
+  @Test
+  public void testHasKey() throws SCMException {
+    ContainerAttribute<Integer> containerAttribute = new ContainerAttribute<>();
+
+    for (int x = 1; x < 42; x++) {
+      containerAttribute.insert(1, new ContainerID(x));
+    }
+    Assert.assertTrue(containerAttribute.hasKey(1));
+    for (int x = 1; x < 42; x++) {
+      Assert.assertTrue(containerAttribute.hasContainerID(1, x));
+    }
+
+    Assert.assertFalse(containerAttribute.hasContainerID(1,
+        new ContainerID(42)));
+  }
+
+  @Test
+  public void testClearSet() throws SCMException {
+    List<String> keyslist = Arrays.asList("Key1", "Key2", "Key3");
+    ContainerAttribute<String> containerAttribute = new ContainerAttribute<>();
+    for (String k : keyslist) {
+      for (int x = 1; x < 101; x++) {
+        containerAttribute.insert(k, new ContainerID(x));
+      }
+    }
+    for (String k : keyslist) {
+      Assert.assertEquals(100,
+          containerAttribute.getCollection(k).size());
+    }
+    containerAttribute.clearSet("Key1");
+    Assert.assertEquals(0,
+        containerAttribute.getCollection("Key1").size());
+  }
+
+  @Test
+  public void testRemove() throws SCMException {
+
+    List<String> keyslist = Arrays.asList("Key1", "Key2", "Key3");
+    ContainerAttribute<String> containerAttribute = new ContainerAttribute<>();
+
+    for (String k : keyslist) {
+      for (int x = 1; x < 101; x++) {
+        containerAttribute.insert(k, new ContainerID(x));
+      }
+    }
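+    // Remove the odd-numbered container IDs from Key1 only; Key2 and Key3
+    // should keep all 100 entries while Key1 drops to 50.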
+    for (int x = 1; x < 101; x += 2) {
+      containerAttribute.remove("Key1", new ContainerID(x));
+    }
+
+    for (int x = 1; x < 101; x += 2) {
+      Assert.assertFalse(containerAttribute.hasContainerID("Key1",
+          new ContainerID(x)));
+    }
+
+    Assert.assertEquals(100,
+        containerAttribute.getCollection("Key2").size());
+
+    Assert.assertEquals(100,
+        containerAttribute.getCollection("Key3").size());
+
+    Assert.assertEquals(50,
+        containerAttribute.getCollection("Key1").size());
+  }
+
+  @Test
+  public void testUpdate() throws SCMException {
+    String key1 = "Key1";
+    String key2 = "Key2";
+    String key3 = "Key3";
+
+    ContainerAttribute<String> containerAttribute = new ContainerAttribute<>();
+    ContainerID id = new ContainerID(42);
+
+    containerAttribute.insert(key1, id);
+    Assert.assertTrue(containerAttribute.hasContainerID(key1, id));
+    Assert.assertFalse(containerAttribute.hasContainerID(key2, id));
+
+    // This should move the id from key1 bucket to key2 bucket.
+    containerAttribute.update(key1, key2, id);
+    Assert.assertFalse(containerAttribute.hasContainerID(key1, id));
+    Assert.assertTrue(containerAttribute.hasContainerID(key2, id));
+
+    // This should fail since we cannot find this id in the key3 bucket.
+    thrown.expect(SCMException.class);
+    containerAttribute.update(key3, key1, id);
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java
new file mode 100644
index 0000000..ad50d97
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestContainerPlacement.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.PathUtils;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DB_CACHE_SIZE_MB;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test for different container placement policy.
+ */
+public class TestContainerPlacement {
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+  private static XceiverClientManager xceiverClientManager =
+      new XceiverClientManager(new OzoneConfiguration());
+
+  private ReportState reportState = ReportState.newBuilder()
+      .setState(ReportState.states.noContainerReports)
+      .setCount(0).build();
+
+  /**
+   * Returns a new copy of Configuration.
+   *
+   * @return Config
+   */
+  OzoneConfiguration getConf() {
+    return new OzoneConfiguration();
+  }
+
+  /**
+   * Creates a NodeManager.
+   *
+   * @param config - Config for the node manager.
+   * @return SCMNodeManager
+   * @throws IOException
+   */
+
+  SCMNodeManager createNodeManager(OzoneConfiguration config)
+      throws IOException {
+    SCMNodeManager nodeManager = new SCMNodeManager(config,
+        UUID.randomUUID().toString(), null);
+    assertFalse("Node manager should be in chill mode",
+        nodeManager.isOutOfChillMode());
+    return nodeManager;
+  }
+
+  ContainerMapping createContainerManager(Configuration config,
+      NodeManager scmNodeManager) throws IOException {
+    final int cacheSize = config.getInt(OZONE_SCM_DB_CACHE_SIZE_MB,
+        OZONE_SCM_DB_CACHE_SIZE_DEFAULT);
+    return new ContainerMapping(config, scmNodeManager, cacheSize);
+
+  }
+
+  /**
+   * Test capacity based container placement policy with node reports.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testContainerPlacementCapacity() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    final int nodeCount = 4;
+    final long capacity = 10L * OzoneConsts.GB;
+    final long used = 2L * OzoneConsts.GB;
+    final long remaining = capacity - used;
+
+    final File testDir = PathUtils.getTestDir(
+        TestContainerPlacement.class);
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        testDir.getAbsolutePath());
+    conf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+        SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
+
+    SCMNodeManager nodeManager = createNodeManager(conf);
+    ContainerMapping containerManager =
+        createContainerManager(conf, nodeManager);
+    List<DatanodeDetails> datanodes =
+        TestUtils.getListOfRegisteredDatanodeDetails(nodeManager, nodeCount);
+    try {
+      for (DatanodeDetails datanodeDetails : datanodes) {
+        SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+        SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+        srb.setStorageUuid(UUID.randomUUID().toString());
+        srb.setCapacity(capacity).setScmUsed(used).
+            setRemaining(remaining).build();
+        nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+            nrb.addStorageReport(srb).build(), reportState);
+      }
+
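+      // Wait until the heartbeats are processed so that the aggregate node
+      // stats asserted below reflect all of the registered datanodes.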
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertEquals(nodeCount, nodeManager.getNodeCount(HEALTHY));
+      assertEquals(capacity * nodeCount,
+          (long) nodeManager.getStats().getCapacity().get());
+      assertEquals(used * nodeCount,
+          (long) nodeManager.getStats().getScmUsed().get());
+      assertEquals(remaining * nodeCount,
+          (long) nodeManager.getStats().getRemaining().get());
+
+      assertTrue(nodeManager.isOutOfChillMode());
+
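+      // Once out of chill mode, allocation should succeed and the pipeline
+      // should contain as many datanodes as the replication factor.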
+      String container1 = UUID.randomUUID().toString();
+      Pipeline pipeline1 = containerManager.allocateContainer(
+          xceiverClientManager.getType(),
+          xceiverClientManager.getFactor(), container1, "OZONE")
+          .getPipeline();
+      assertEquals(xceiverClientManager.getFactor().getNumber(),
+          pipeline1.getMachines().size());
+    } finally {
+      IOUtils.closeQuietly(containerManager);
+      IOUtils.closeQuietly(nodeManager);
+      FileUtil.fullyDelete(testDir);
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java
new file mode 100644
index 0000000..89ce12e
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestNodeManager.java
@@ -0,0 +1,1176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.node;
+
+import com.google.common.base.Supplier;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.PathUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DEADNODE_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_MAX_HB_COUNT_TO_PROCESS;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_STALENODE_INTERVAL;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.core.StringStartsWith.startsWith;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test the Node Manager class.
+ */
+public class TestNodeManager {
+
+  private File testDir;
+
+  private ReportState reportState = ReportState.newBuilder()
+      .setState(ReportState.states.noContainerReports)
+      .setCount(0).build();
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void init() throws IOException {
+  }
+
+  @Before
+  public void setup() {
+    testDir = PathUtils.getTestDir(
+        TestNodeManager.class);
+  }
+
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testDir);
+  }
+
+  /**
+   * Returns a new copy of Configuration.
+   *
+   * @return Config
+   */
+  OzoneConfiguration getConf() {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        testDir.getAbsolutePath());
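+    // Use a short heartbeat processing interval so that the tests do not
+    // have to wait long for SCM to process heartbeats.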
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 100,
+        TimeUnit.MILLISECONDS);
+    return conf;
+  }
+
+  /**
+   * Creates a NodeManager.
+   *
+   * @param config - Config for the node manager.
+   * @return SCMNodeManager
+   * @throws IOException
+   */
+
+  SCMNodeManager createNodeManager(OzoneConfiguration config)
+      throws IOException {
+    SCMNodeManager nodeManager = new SCMNodeManager(config,
+        UUID.randomUUID().toString(), null);
+    assertFalse("Node manager should be in chill mode",
+        nodeManager.isOutOfChillMode());
+    return nodeManager;
+  }
+
+  /**
+   * Tests that the node manager handles heartbeats correctly and comes out
+   * of chill mode.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmHeartbeat() throws IOException,
+      InterruptedException, TimeoutException {
+
+    try (SCMNodeManager nodeManager = createNodeManager(getConf())) {
+      // Send some heartbeats from different nodes.
+      for (int x = 0; x < nodeManager.getMinimumChillModeNodes(); x++) {
+        DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(
+            nodeManager);
+        nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+            null, reportState);
+      }
+
+      // Wait for 4 seconds max.
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+
+      assertTrue("Heartbeat thread should have picked up the" +
+              "scheduled heartbeats and transitioned out of chill mode.",
+          nodeManager.isOutOfChillMode());
+    }
+  }
+
+  /**
+   * Asserts that if we send no heartbeats, the node manager stays in chill
+   * mode.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmNoHeartbeats() throws IOException,
+      InterruptedException, TimeoutException {
+
+    try (SCMNodeManager nodeManager = createNodeManager(getConf())) {
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertFalse("No heartbeats, Node manager should have been in" +
+          " chill mode.", nodeManager.isOutOfChillMode());
+    }
+  }
+
+  /**
+   * Asserts that if we don't get heartbeats from enough unique nodes, we
+   * stay in chill mode.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmNotEnoughHeartbeats() throws IOException,
+      InterruptedException, TimeoutException {
+    try (SCMNodeManager nodeManager = createNodeManager(getConf())) {
+
+      // Need 100 nodes to come out of chill mode, only one node is sending HB.
+      nodeManager.setMinimumChillModeNodes(100);
+      nodeManager.sendHeartbeat(TestUtils.getDatanodeDetails(nodeManager)
+          .getProtoBufMessage(),
+          null, reportState);
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertFalse("Not enough heartbeat, Node manager should have" +
+          "been in chillmode.", nodeManager.isOutOfChillMode());
+    }
+  }
+
+  /**
+   * Asserts that many heartbeats from the same node are counted as a single
+   * node.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmSameNodeHeartbeats() throws IOException,
+      InterruptedException, TimeoutException {
+
+    try (SCMNodeManager nodeManager = createNodeManager(getConf())) {
+      nodeManager.setMinimumChillModeNodes(3);
+      DatanodeDetails datanodeDetails = TestUtils
+          .getDatanodeDetails(nodeManager);
+
+      // Send 10 heartbeats from the same node, and assert we never leave
+      // chill mode.
+      for (int x = 0; x < 10; x++) {
+        nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+            null, reportState);
+      }
+
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertFalse("Not enough nodes have send heartbeat to node" +
+          "manager.", nodeManager.isOutOfChillMode());
+    }
+  }
+
+  /**
+   * Asserts that adding heartbeats after shutdown does not work. This implies
+   * that the heartbeat thread has been shut down safely by closing the node
+   * manager.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmShutdown() throws IOException, InterruptedException,
+      TimeoutException {
+    OzoneConfiguration conf = getConf();
+    conf.getTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+        100, TimeUnit.MILLISECONDS);
+    SCMNodeManager nodeManager = createNodeManager(conf);
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(nodeManager);
+    nodeManager.close();
+
+    // These should never be processed.
+    nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+        null, reportState);
+
+    // Let us just wait for 2 seconds to prove that HBs are not processed.
+    Thread.sleep(2 * 1000);
+
+    assertEquals("Assert new HBs were never processed", 0,
+        nodeManager.getLastHBProcessedCount());
+  }
+
+  /**
+   * Asserts that SCM informs datanodes to re-register with the node manager
+   * after a restart.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testScmHeartbeatAfterRestart() throws Exception {
+    OzoneConfiguration conf = getConf();
+    conf.getTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+        100, TimeUnit.MILLISECONDS);
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+    try (SCMNodeManager nodemanager = createNodeManager(conf)) {
+      nodemanager.register(datanodeDetails.getProtoBufMessage());
+      List<SCMCommand> command = nodemanager.sendHeartbeat(
+          datanodeDetails.getProtoBufMessage(),
+          null, reportState);
+      Assert.assertTrue(nodemanager.getAllNodes().contains(datanodeDetails));
+      Assert.assertTrue("On regular HB calls, SCM responses a "
+          + "datanode with an empty command list", command.isEmpty());
+    }
+
+    // Sends heartbeat without registering to SCM.
+    // This happens when SCM restarts.
+    try (SCMNodeManager nodemanager = createNodeManager(conf)) {
+      Assert.assertFalse(nodemanager
+          .getAllNodes().contains(datanodeDetails));
+      try {
+        // SCM handles heartbeat asynchronously.
+        // It may need more than one heartbeat processing to
+        // send the notification.
+        GenericTestUtils.waitFor(new Supplier<Boolean>() {
+          @Override public Boolean get() {
+            List<SCMCommand> command =
+                nodemanager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+                    null, reportState);
+            return command.size() == 1 && command.get(0).getType()
+                .equals(SCMCmdType.reregisterCommand);
+          }
+        }, 100, 3 * 1000);
+      } catch (TimeoutException e) {
+        Assert.fail("Times out to verify that scm informs "
+            + "datanode to re-register itself.");
+      }
+    }
+  }
+
+  /**
+   * Asserts that we detect as many healthy nodes as we have generated
+   * heartbeats for.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmHealthyNodeCount() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    final int count = 10;
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+
+      for (int x = 0; x < count; x++) {
+        DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(
+            nodeManager);
+        nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+            null, reportState);
+      }
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertEquals(count, nodeManager.getNodeCount(HEALTHY));
+    }
+  }
+
+  /**
+   * Asserts that if the user provides a stale node interval less than 5
+   * times the heartbeat processing interval, we throw, since that is a QoS
+   * that we cannot maintain.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+
+  @Test
+  public void testScmSanityOfUserConfig1() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    final int interval = 100;
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, interval,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+
+    // The stale node interval must be at least 5 times
+    // OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL and 3 times
+    // OZONE_SCM_HEARTBEAT_INTERVAL; here we deliberately set it too low.
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, interval, MILLISECONDS);
+
+    thrown.expect(IllegalArgumentException.class);
+
+    // The bounds in the expected message are derived from the interval value.
+    thrown.expectMessage(
+        startsWith("100 is not within min = 500 or max = 100000"));
+    createNodeManager(conf);
+  }
+
+  /**
+   * Asserts that a stale node interval that is more than 5 times the HB
+   * processing interval is accepted as a sane value.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmSanityOfUserConfig2() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    final int interval = 100;
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, interval,
+        TimeUnit.MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, TimeUnit.SECONDS);
+
+    // This should be 5 times more than  OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL
+    // and 3 times more than OZONE_SCM_HEARTBEAT_INTERVAL
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3 * 1000, MILLISECONDS);
+    createNodeManager(conf).close();
+  }
+
+  /**
+   * Asserts that a single node moves from healthy to stale, and then from
+   * stale to dead, if it misses enough heartbeats.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmDetectStaleAndDeadNode() throws IOException,
+      InterruptedException, TimeoutException {
+    final int interval = 100;
+    final int nodeCount = 10;
+
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, interval,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, SECONDS);
+
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      List<DatanodeDetails> nodeList = createNodeSet(nodeManager, nodeCount);
+
+
+      DatanodeDetails staleNode = TestUtils.getDatanodeDetails(nodeManager);
+
+      // Heartbeat once
+      nodeManager.sendHeartbeat(staleNode.getProtoBufMessage(),
+          null, reportState);
+
+      // Heartbeat all other nodes.
+      for (DatanodeDetails dn : nodeList) {
+        nodeManager.sendHeartbeat(dn.getProtoBufMessage(), null, reportState);
+      }
+
+      // Wait for 2 seconds .. and heartbeat good nodes again.
+      Thread.sleep(2 * 1000);
+
+      for (DatanodeDetails dn : nodeList) {
+        nodeManager.sendHeartbeat(dn.getProtoBufMessage(), null, reportState);
+      }
+
+      // Wait another 2 seconds (4 seconds in total) to make sure that the
+      // node moves into the stale state.
+      Thread.sleep(2 * 1000);
+      List<DatanodeDetails> staleNodeList = nodeManager.getNodes(STALE);
+      assertEquals("Expected to find 1 stale node",
+          1, nodeManager.getNodeCount(STALE));
+      assertEquals("Expected to find 1 stale node",
+          1, staleNodeList.size());
+      assertEquals("Stale node is not the expected ID", staleNode
+          .getUuid(), staleNodeList.get(0).getUuid());
+      Thread.sleep(1000);
+
+      // heartbeat good nodes again.
+      for (DatanodeDetails dn : nodeList) {
+        nodeManager.sendHeartbeat(dn.getProtoBufMessage(), null, reportState);
+      }
+
+      // 6 seconds is the dead window for this test, so we wait a total of
+      // 7 seconds to make sure that the node moves into the dead state.
+      Thread.sleep(2 * 1000);
+
+      // The stale node should have transitioned to dead by now.
+      staleNodeList = nodeManager.getNodes(STALE);
+      assertEquals("Expected to find no stale nodes",
+          0, nodeManager.getNodeCount(STALE));
+      assertEquals("Expected to find no stale nodes",
+          0, staleNodeList.size());
+
+      // Check for the dead node now.
+      List<DatanodeDetails> deadNodeList = nodeManager.getNodes(DEAD);
+      assertEquals("Expected to find 1 dead node", 1,
+          nodeManager.getNodeCount(DEAD));
+      assertEquals("Expected to find 1 dead node",
+          1, deadNodeList.size());
+      assertEquals("Dead node is not the expected ID", staleNode
+          .getUuid(), deadNodeList.get(0).getUuid());
+    }
+  }
+
+  /**
+   * Check for NPE when datanodeDetails is passed null for sendHeartbeat.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmCheckForErrorOnNullDatanodeDetails() throws IOException,
+      InterruptedException, TimeoutException {
+    try (SCMNodeManager nodeManager = createNodeManager(getConf())) {
+      nodeManager.sendHeartbeat(null, null, reportState);
+      Assert.fail("Expected a NullPointerException for a null "
+          + "DatanodeDetails.");
+    } catch (NullPointerException npe) {
+      GenericTestUtils.assertExceptionContains("Heartbeat is missing " +
+          "DatanodeDetails.", npe);
+    }
+  }
+
+  /**
+   * Asserts that dead, stale and healthy nodes can co-exist, and that the
+   * counts, lists and node IDs match the expected node states.
+   * <p/>
+   * This test is pretty complicated because it explores all states of the
+   * node manager in a single test. Please read through the comments to keep
+   * track of the current state of the node manager.
+   * <p/>
+   * This test is written like a state machine to avoid threads and
+   * concurrency issues. This test is replicated below with the use of
+   * threads; avoiding threads makes the state machine easy to debug.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmClusterIsInExpectedState1() throws IOException,
+      InterruptedException, TimeoutException {
+    /**
+     * These values are very important. Here is what they mean, so you don't
+     * have to look them up while reading this code.
+     *
+     *  OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL - This is the frequency of the
+     *  HB processing thread that runs in the SCM. This thread must run
+     *  for the SCM to process the heartbeats.
+     *
+     *  OZONE_SCM_HEARTBEAT_INTERVAL - This is the frequency at which
+     *  datanodes will send heartbeats to SCM. Please note: This is the only
+     *  config value for the node manager that is specified in seconds; we
+     *  don't want the SCM heartbeat resolution to be finer than seconds.
+     *  In this test it is not used, but we are forced to set it because
+     *  validation code checks that the stale node interval and dead node
+     *  interval are larger than the value of
+     *  OZONE_SCM_HEARTBEAT_INTERVAL.
+     *
+     *  OZONE_SCM_STALENODE_INTERVAL - This is the time that must elapse
+     *  from the last heartbeat for us to mark a node as stale. In this test
+     *  we set that to 3 seconds. That is, if a node has not heartbeated SCM
+     *  for the last 3 seconds, we will mark it as stale.
+     *
+     *  OZONE_SCM_DEADNODE_INTERVAL - This is the time that must elapse
+     *  from the last heartbeat for a node to be marked dead. We have an
+     *  additional constraint that this must be at least 2 times bigger than
+     *  Stale node Interval.
+     *
+     *  With these we are trying to explore the state of this cluster with
+     *  various timeouts. Each section is commented so that you can keep
+     *  track of the state of the cluster nodes.
+     *
+     */
+
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 100,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, SECONDS);
+
+
+    /**
+     * Cluster state: Healthy: all nodes are heartbeating normally.
+     */
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      DatanodeDetails healthyNode =
+          TestUtils.getDatanodeDetails(nodeManager);
+      DatanodeDetails staleNode =
+          TestUtils.getDatanodeDetails(nodeManager);
+      DatanodeDetails deadNode =
+          TestUtils.getDatanodeDetails(nodeManager);
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          staleNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          deadNode.getProtoBufMessage(), null, reportState);
+
+      // Sleep so that heartbeat processing thread gets to run.
+      Thread.sleep(500);
+
+      //Assert all nodes are healthy.
+      assertEquals(3, nodeManager.getAllNodes().size());
+      assertEquals(3, nodeManager.getNodeCount(HEALTHY));
+
+      /**
+       * Cluster state: Quiesced: we are going to sleep for 3 seconds, which
+       * means that no node is heartbeating. All nodes should move to stale.
+       */
+      Thread.sleep(3 * 1000);
+      assertEquals(3, nodeManager.getAllNodes().size());
+      assertEquals(3, nodeManager.getNodeCount(STALE));
+
+
+      /**
+       * Cluster state: Move the healthy node back to the healthy state and
+       * move the other 2 nodes to the Stale state.
+       *
+       * We heartbeat all 3 nodes, heartbeat the healthy node again after 1.5
+       * seconds and let the other 2 nodes exceed the 3 second stale window.
+       */
+
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          staleNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          deadNode.getProtoBufMessage(), null, reportState);
+
+      Thread.sleep(1500);
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      Thread.sleep(2 * 1000);
+      assertEquals(1, nodeManager.getNodeCount(HEALTHY));
+
+
+      // 3.5 seconds have elapsed since the last heartbeat of the stale and
+      // dead nodes, so those 2 nodes must move to the Stale state while the
+      // healthy node remains in the Healthy state.
+      List<DatanodeDetails> healthyList = nodeManager.getNodes(HEALTHY);
+      assertEquals("Expected one healthy node", 1, healthyList.size());
+      assertEquals("Healthy node is not the expected ID", healthyNode
+          .getUuid(), healthyList.get(0).getUuid());
+
+      assertEquals(2, nodeManager.getNodeCount(STALE));
+
+      /**
+       * Cluster state: Allow healthyNode to remain healthy, staleNode to
+       * move to the stale state, and deadNode to move to the dead state.
+       */
+
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          staleNode.getProtoBufMessage(), null, reportState);
+      Thread.sleep(1500);
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      Thread.sleep(2 * 1000);
+
+      // 3.5 seconds have elapsed for the stale node, so it moves into Stale.
+      // 7 seconds have elapsed for the dead node, so it moves into Dead.
+      // 2 seconds have elapsed for the healthy node, so it stays Healthy.
+      healthyList = nodeManager.getNodes(HEALTHY);
+      List<DatanodeDetails> staleList = nodeManager.getNodes(STALE);
+      List<DatanodeDetails> deadList = nodeManager.getNodes(DEAD);
+
+      assertEquals(3, nodeManager.getAllNodes().size());
+      assertEquals(1, nodeManager.getNodeCount(HEALTHY));
+      assertEquals(1, nodeManager.getNodeCount(STALE));
+      assertEquals(1, nodeManager.getNodeCount(DEAD));
+
+      assertEquals("Expected one healthy node",
+          1, healthyList.size());
+      assertEquals("Healthy node is not the expected ID", healthyNode
+          .getUuid(), healthyList.get(0).getUuid());
+
+      assertEquals("Expected one stale node",
+          1, staleList.size());
+      assertEquals("Stale node is not the expected ID", staleNode
+          .getUuid(), staleList.get(0).getUuid());
+
+      assertEquals("Expected one dead node",
+          1, deadList.size());
+      assertEquals("Dead node is not the expected ID", deadNode
+          .getUuid(), deadList.get(0).getUuid());
+      /**
+       * Cluster state: heartbeat all the nodes and verify that we get back
+       * all the nodes in the healthy state.
+       */
+      nodeManager.sendHeartbeat(
+          healthyNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          staleNode.getProtoBufMessage(), null, reportState);
+      nodeManager.sendHeartbeat(
+          deadNode.getProtoBufMessage(), null, reportState);
+      Thread.sleep(500);
+      // Assert all nodes are healthy.
+      assertEquals(3, nodeManager.getAllNodes().size());
+      assertEquals(3, nodeManager.getNodeCount(HEALTHY));
+    }
+  }
+
+  /**
+   * Heartbeat a given set of nodes at a specified frequency.
+   *
+   * @param manager       - Node Manager
+   * @param list          - List of DatanodeDetails.
+   * @param sleepDuration - Duration in milliseconds to sleep between
+   *                        heartbeat rounds.
+   * @throws InterruptedException
+   */
+  private void heartbeatNodeSet(SCMNodeManager manager,
+                                List<DatanodeDetails> list,
+                                int sleepDuration) throws InterruptedException {
+    while (!Thread.currentThread().isInterrupted()) {
+      for (DatanodeDetails dn : list) {
+        manager.sendHeartbeat(dn.getProtoBufMessage(), null, reportState);
+      }
+      Thread.sleep(sleepDuration);
+    }
+  }
+
+  /**
+   * Create a set of datanodes with randomly generated UUIDs.
+   *
+   * @param nodeManager - Node manager the datanodes are registered with.
+   * @param count  - number of nodes.
+   * @return List of Nodes.
+   */
+  private List<DatanodeDetails> createNodeSet(SCMNodeManager nodeManager, int
+      count) {
+    List<DatanodeDetails> list = new LinkedList<>();
+    for (int x = 0; x < count; x++) {
+      list.add(TestUtils.getDatanodeDetails(nodeManager, UUID.randomUUID()
+          .toString()));
+    }
+    return list;
+  }
+
+  /**
+   * Tells us if the node manager reports the expected number of nodes in the
+   * given state.
+   *
+   * @param nodeManager - node manager
+   * @param count       - number of nodes to look for.
+   * @param state       - node state to count.
+   * @return true if we found the expected number.
+   */
+  private boolean findNodes(NodeManager nodeManager, int count,
+      HddsProtos.NodeState state) {
+    return count == nodeManager.getNodeCount(state);
+  }
+
+  /**
+   * Asserts that we can create a set of nodes that send their heartbeats from
+   * different threads and that the NodeManager behaves as expected.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  @Test
+  public void testScmClusterIsInExpectedState2() throws IOException,
+      InterruptedException, TimeoutException {
+    final int healthyCount = 5000;
+    final int staleCount = 100;
+    final int deadCount = 10;
+
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 100,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, SECONDS);
+    conf.setInt(OZONE_SCM_MAX_HB_COUNT_TO_PROCESS, 7000);
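+    // 7000 is above the total number of datanodes in this test
+    // (5000 + 100 + 10 = 5110), so even a full burst of heartbeats should
+    // stay under the per-cycle processing cap and the flooding back-off
+    // (exercised in testScmLogsHeartbeatFlooding below) should not kick in.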
+
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      List<DatanodeDetails> healthyNodeList = createNodeSet(nodeManager,
+          healthyCount);
+      List<DatanodeDetails> staleNodeList = createNodeSet(nodeManager,
+          staleCount);
+      List<DatanodeDetails> deadNodeList = createNodeSet(nodeManager,
+          deadCount);
+
+      Runnable healthyNodeTask = () -> {
+        try {
+          // 2 second heartbeat makes these nodes stay healthy.
+          heartbeatNodeSet(nodeManager, healthyNodeList, 2 * 1000);
+        } catch (InterruptedException ignored) {
+        }
+      };
+
+      Runnable staleNodeTask = () -> {
+        try {
+          // 4 second heartbeat makes these nodes go to stale and back to
+          // healthy again.
+          heartbeatNodeSet(nodeManager, staleNodeList, 4 * 1000);
+        } catch (InterruptedException ignored) {
+        }
+      };
+
+
+      // No thread here; we heartbeat these nodes only once so that they will
+      // be marked as dead nodes eventually.
+      for (DatanodeDetails dn : deadNodeList) {
+        nodeManager.sendHeartbeat(dn.getProtoBufMessage(), null, reportState);
+      }
+
+
+      Thread thread1 = new Thread(healthyNodeTask);
+      thread1.setDaemon(true);
+      thread1.start();
+
+
+      Thread thread2 = new Thread(staleNodeTask);
+      thread2.setDaemon(true);
+      thread2.start();
+
+      Thread.sleep(10 * 1000);
+
+      // Assert all healthy nodes are healthy now. This has to be a
+      // greater-than-or-equal check since stale nodes can also be healthy
+      // when we check the state.
+
+      assertTrue(nodeManager.getNodeCount(HEALTHY) >= healthyCount);
+
+      assertEquals(deadCount, nodeManager.getNodeCount(DEAD));
+
+      List<DatanodeDetails> deadList = nodeManager.getNodes(DEAD);
+
+      for (DatanodeDetails node : deadList) {
+        assertTrue(deadNodeList.contains(node));
+      }
+
+
+
+      // Checking stale nodes is tricky since they have to move between
+      // healthy and stale to avoid becoming dead nodes. So we search for
+      // that state for a while; if we don't find it, waitFor will throw.
+      GenericTestUtils.waitFor(() -> findNodes(nodeManager, staleCount, STALE),
+          500, 4 * 1000);
+
+      thread1.interrupt();
+      thread2.interrupt();
+    }
+  }
+
+  /**
+   * Asserts that we can handle 6000+ nodes heartbeating SCM.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmCanHandleScale() throws IOException,
+      InterruptedException, TimeoutException {
+    final int healthyCount = 3000;
+    final int staleCount = 3000;
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 100,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1,
+        SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3 * 1000,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6 * 1000,
+        MILLISECONDS);
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      List<DatanodeDetails> healthyList = createNodeSet(nodeManager,
+          healthyCount);
+      List<DatanodeDetails> staleList = createNodeSet(nodeManager,
+          staleCount);
+
+      Runnable healthyNodeTask = () -> {
+        try {
+          heartbeatNodeSet(nodeManager, healthyList, 2 * 1000);
+        } catch (InterruptedException ignored) {
+
+        }
+      };
+
+      Runnable staleNodeTask = () -> {
+        try {
+          heartbeatNodeSet(nodeManager, staleList, 4 * 1000);
+        } catch (InterruptedException ignored) {
+        }
+      };
+
+      Thread thread1 = new Thread(healthyNodeTask);
+      thread1.setDaemon(true);
+      thread1.start();
+
+      Thread thread2 = new Thread(staleNodeTask);
+      thread2.setDaemon(true);
+      thread2.start();
+      Thread.sleep(3 * 1000);
+
+      GenericTestUtils.waitFor(() -> findNodes(nodeManager, staleCount, STALE),
+          500, 20 * 1000);
+      assertEquals("Node count mismatch",
+          healthyCount + staleCount, nodeManager.getAllNodes().size());
+
+      thread1.interrupt();
+      thread2.interrupt();
+    }
+  }
+
+  /**
+   * Asserts that SCM backs off from HB processing instead of going into an
+   * infinite loop if SCM is flooded with too many heartbeats. This may not be
+   * the best thing to do, but SCM tries to protect itself and logs an error
+   * saying that it is getting flooded with heartbeats. In the real world this
+   * can lead to many nodes becoming stale or dead because SCM is not able to
+   * keep up with heartbeat processing. This test just verifies that SCM will
+   * log that information.
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmLogsHeartbeatFlooding() throws IOException,
+      InterruptedException, TimeoutException {
+    final int healthyCount = 3000;
+
+    // Make the HB process thread run slower.
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 500,
+        TimeUnit.MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setInt(OZONE_SCM_MAX_HB_COUNT_TO_PROCESS, 500);
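+    // With 3000 nodes heartbeating in a tight loop, a 500 ms heartbeat
+    // processing interval and a cap of 500 heartbeats per cycle, the
+    // heartbeat queue should quickly exceed what SCM can drain, triggering
+    // the flooding log message we wait for below.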
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      List<DatanodeDetails> healthyList = createNodeSet(nodeManager,
+          healthyCount);
+      GenericTestUtils.LogCapturer logCapturer =
+          GenericTestUtils.LogCapturer.captureLogs(SCMNodeManager.LOG);
+      Runnable healthyNodeTask = () -> {
+        try {
+          // No wait in the HB sending loop.
+          heartbeatNodeSet(nodeManager, healthyList, 0);
+        } catch (InterruptedException ignored) {
+        }
+      };
+      Thread thread1 = new Thread(healthyNodeTask);
+      thread1.setDaemon(true);
+      thread1.start();
+
+      GenericTestUtils.waitFor(() -> logCapturer.getOutput()
+          .contains("SCM is being "
+              + "flooded by heartbeats. Not able to keep up"
+              + " with the heartbeat counts."),
+          500, 20 * 1000);
+
+      thread1.interrupt();
+      logCapturer.stopCapturing();
+    }
+  }
+
+  @Test
+  public void testScmEnterAndExitChillMode() throws IOException,
+      InterruptedException {
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 100,
+        MILLISECONDS);
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      nodeManager.setMinimumChillModeNodes(10);
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(
+          nodeManager);
+      nodeManager.sendHeartbeat(
+          datanodeDetails.getProtoBufMessage(), null, reportState);
+      String status = nodeManager.getChillModeStatus();
+      Assert.assertThat(status, containsString("Still in chill " +
+          "mode, waiting on nodes to report in."));
+
+      // Should not exit chill mode since fewer than 10 nodes have heartbeated.
+      assertFalse(nodeManager.isOutOfChillMode());
+
+      // Force exit chill mode.
+      nodeManager.forceExitChillMode();
+      assertTrue(nodeManager.isOutOfChillMode());
+      status = nodeManager.getChillModeStatus();
+      Assert.assertThat(status,
+          containsString("Out of chill mode."));
+
+
+      // Enter back into chill mode.
+      nodeManager.enterChillMode();
+      assertFalse(nodeManager.isOutOfChillMode());
+      status = nodeManager.getChillModeStatus();
+      Assert.assertThat(status,
+          containsString("Out of startup chill mode," +
+              " but in manual chill mode."));
+
+      // Assert that manually entered chill mode cannot be overridden by
+      // node HBs.
+      for (int x = 0; x < 20; x++) {
+        DatanodeDetails datanode = TestUtils.getDatanodeDetails(nodeManager);
+        nodeManager.sendHeartbeat(datanode.getProtoBufMessage(),
+            null, reportState);
+      }
+
+      Thread.sleep(500);
+      assertFalse(nodeManager.isOutOfChillMode());
+
+      // Make sure that once we exit manual chill mode, we fall back to the
+      // node count criterion for getting out of chill mode.
+      nodeManager.exitChillMode();
+      assertTrue(nodeManager.isOutOfChillMode());
+      status = nodeManager.getChillModeStatus();
+      Assert.assertThat(status,
+          containsString("Out of chill mode."));
+    }
+  }
+
+  /**
+   * Test multiple nodes sending initial heartbeat with their node report.
+   *
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmStatsFromNodeReport() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 1000,
+        MILLISECONDS);
+    final int nodeCount = 10;
+    final long capacity = 2000;
+    final long used = 100;
+    final long remaining = capacity - used;
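+
+    // Each of the nodeCount datanodes reports the same capacity, used and
+    // remaining values, so the aggregated SCM stats checked below should be
+    // exactly nodeCount times the per-node values.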
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      for (int x = 0; x < nodeCount; x++) {
+        DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(
+            nodeManager);
+
+        SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+        SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+        srb.setStorageUuid(UUID.randomUUID().toString());
+        srb.setCapacity(capacity).setScmUsed(used).
+            setRemaining(capacity - used).build();
+        nodeManager.sendHeartbeat(datanodeDetails.getProtoBufMessage(),
+            nrb.addStorageReport(srb).build(), reportState);
+      }
+      GenericTestUtils.waitFor(() -> nodeManager.waitForHeartbeatProcessed(),
+          100, 4 * 1000);
+      assertEquals(nodeCount, nodeManager.getNodeCount(HEALTHY));
+      assertEquals(capacity * nodeCount, (long) nodeManager.getStats()
+          .getCapacity().get());
+      assertEquals(used * nodeCount, (long) nodeManager.getStats()
+          .getScmUsed().get());
+      assertEquals(remaining * nodeCount, (long) nodeManager.getStats()
+          .getRemaining().get());
+    }
+  }
+
+  /**
+   * Test single node stat updates based on node reports across the different
+   * heartbeat states (healthy, stale and dead).
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws TimeoutException
+   */
+  @Test
+  public void testScmNodeReportUpdate() throws IOException,
+      InterruptedException, TimeoutException {
+    OzoneConfiguration conf = getConf();
+    final int heartbeatCount = 5;
+    final int nodeCount = 1;
+    final int interval = 100;
+
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, interval,
+        MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, SECONDS);
+
+    try (SCMNodeManager nodeManager = createNodeManager(conf)) {
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails(
+          nodeManager);
+      final long capacity = 2000;
+      final long usedPerHeartbeat = 100;
+
+      for (int x = 0; x < heartbeatCount; x++) {
+        SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+        SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+        srb.setStorageUuid(UUID.randomUUID().toString());
+        srb.setCapacity(capacity).setScmUsed(x * usedPerHeartbeat)
+            .setRemaining(capacity - x * usedPerHeartbeat).build();
+        nrb.addStorageReport(srb);
+
+        nodeManager.sendHeartbeat(
+            datanodeDetails.getProtoBufMessage(), nrb.build(), reportState);
+        Thread.sleep(100);
+      }
+
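+      // The loop above sends heartbeatCount reports with scmUsed values of
+      // 0, 100, ..., (heartbeatCount - 1) * usedPerHeartbeat. SCM tracks the
+      // latest report per node, so the expected usage below corresponds to
+      // the last report sent.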
+      final long expectedScmUsed = usedPerHeartbeat * (heartbeatCount - 1);
+      final long expectedRemaining = capacity - expectedScmUsed;
+
+      GenericTestUtils.waitFor(
+          () -> nodeManager.getStats().getScmUsed().get() == expectedScmUsed,
+          100, 4 * 1000);
+
+      long foundCapacity = nodeManager.getStats().getCapacity().get();
+      assertEquals(capacity, foundCapacity);
+
+      long foundScmUsed = nodeManager.getStats().getScmUsed().get();
+      assertEquals(expectedScmUsed, foundScmUsed);
+
+      long foundRemaining = nodeManager.getStats().getRemaining().get();
+      assertEquals(expectedRemaining, foundRemaining);
+
+      // Test NodeManager#getNodeStats
+      assertEquals(nodeCount, nodeManager.getNodeStats().size());
+      long nodeCapacity = nodeManager.getNodeStat(datanodeDetails).get()
+          .getCapacity().get();
+      assertEquals(capacity, nodeCapacity);
+
+      foundScmUsed = nodeManager.getNodeStat(datanodeDetails).get().getScmUsed()
+          .get();
+      assertEquals(expectedScmUsed, foundScmUsed);
+
+      foundRemaining = nodeManager.getNodeStat(datanodeDetails).get()
+          .getRemaining().get();
+      assertEquals(expectedRemaining, foundRemaining);
+
+      // Compare the result from
+      // NodeManager#getNodeStats and NodeManager#getNodeStat
+      SCMNodeStat stat1 = nodeManager.getNodeStats().
+          get(datanodeDetails.getUuid());
+      SCMNodeStat stat2 = nodeManager.getNodeStat(datanodeDetails).get();
+      assertEquals(stat1, stat2);
+
+      // Wait up to 4 seconds so that the node becomes stale.
+      // Verify that the usage info is unchanged.
+      GenericTestUtils.waitFor(
+          () -> nodeManager.getNodeCount(STALE) == 1, 100,
+          4 * 1000);
+      assertEquals(nodeCount, nodeManager.getNodeStats().size());
+
+      foundCapacity = nodeManager.getNodeStat(datanodeDetails).get()
+          .getCapacity().get();
+      assertEquals(capacity, foundCapacity);
+      foundScmUsed = nodeManager.getNodeStat(datanodeDetails).get()
+          .getScmUsed().get();
+      assertEquals(expectedScmUsed, foundScmUsed);
+
+      foundRemaining = nodeManager.getNodeStat(datanodeDetails).get().
+          getRemaining().get();
+      assertEquals(expectedRemaining, foundRemaining);
+
+      // Wait up to 4 more seconds so that the node becomes dead.
+      // Verify that the usage info is removed from the aggregated stats.
+      GenericTestUtils.waitFor(
+          () -> nodeManager.getNodeCount(DEAD) == 1, 100,
+          4 * 1000);
+
+      assertEquals(0, nodeManager.getNodeStats().size());
+      foundCapacity = nodeManager.getStats().getCapacity().get();
+      assertEquals(0, foundCapacity);
+
+      foundScmUsed = nodeManager.getStats().getScmUsed().get();
+      assertEquals(0, foundScmUsed);
+
+      foundRemaining = nodeManager.getStats().getRemaining().get();
+      assertEquals(0, foundRemaining);
+
+      // Send a new report to bring the dead node back to healthy
+      SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+      SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+      srb.setStorageUuid(UUID.randomUUID().toString());
+      srb.setCapacity(capacity).setScmUsed(expectedScmUsed)
+          .setRemaining(expectedRemaining).build();
+      nrb.addStorageReport(srb);
+      nodeManager.sendHeartbeat(
+          datanodeDetails.getProtoBufMessage(), nrb.build(), reportState);
+
+      // Wait up to 5 seconds so that the dead node becomes healthy again.
+      // Verify that the usage info is updated.
+      GenericTestUtils.waitFor(
+          () -> nodeManager.getNodeCount(HEALTHY) == 1,
+          100, 5 * 1000);
+      GenericTestUtils.waitFor(
+          () -> nodeManager.getStats().getScmUsed().get() == expectedScmUsed,
+          100, 4 * 1000);
+      assertEquals(nodeCount, nodeManager.getNodeStats().size());
+      foundCapacity = nodeManager.getNodeStat(datanodeDetails).get()
+          .getCapacity().get();
+      assertEquals(capacity, foundCapacity);
+      foundScmUsed = nodeManager.getNodeStat(datanodeDetails).get().getScmUsed()
+          .get();
+      assertEquals(expectedScmUsed, foundScmUsed);
+      foundRemaining = nodeManager.getNodeStat(datanodeDetails).get()
+          .getRemaining().get();
+      assertEquals(expectedRemaining, foundRemaining);
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java
new file mode 100644
index 0000000..8f412de
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/node/TestSCMNodePoolManager.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.node;
+
+import org.apache.commons.collections.ListUtils;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.test.PathUtils;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test for SCM node pool manager.
+ */
+public class TestSCMNodePoolManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestSCMNodePoolManager.class);
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private final File testDir = PathUtils.getTestDir(
+      TestSCMNodePoolManager.class);
+
+  SCMNodePoolManager createNodePoolManager(OzoneConfiguration conf)
+      throws IOException {
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+        testDir.getAbsolutePath());
+    conf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+        SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
+    return new SCMNodePoolManager(conf);
+  }
+
+  /**
+   * Test default node pool.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testDefaultNodePool() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    try {
+      final String defaultPool = "DefaultPool";
+      NodePoolManager npMgr = createNodePoolManager(conf);
+
+      final int nodeCount = 4;
+      final List<DatanodeDetails> nodes = TestUtils
+          .getListOfDatanodeDetails(nodeCount);
+      assertEquals(0, npMgr.getNodePools().size());
+      for (DatanodeDetails node: nodes) {
+        npMgr.addNode(defaultPool, node);
+      }
+      List<DatanodeDetails> nodesRetrieved = npMgr.getNodes(defaultPool);
+      assertEquals(nodeCount, nodesRetrieved.size());
+      assertTwoDatanodeListsEqual(nodes, nodesRetrieved);
+
+      DatanodeDetails nodeRemoved = nodes.remove(2);
+      npMgr.removeNode(defaultPool, nodeRemoved);
+      List<DatanodeDetails> nodesAfterRemove = npMgr.getNodes(defaultPool);
+      assertTwoDatanodeListsEqual(nodes, nodesAfterRemove);
+
+      List<DatanodeDetails> nonExistSet = npMgr.getNodes("NonExistSet");
+      assertEquals(0, nonExistSet.size());
+    } finally {
+      FileUtil.fullyDelete(testDir);
+    }
+  }
+
+
+  /**
+   * Test default node pool reload.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testDefaultNodePoolReload() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final String defaultPool = "DefaultPool";
+    final int nodeCount = 4;
+    final List<DatanodeDetails> nodes = TestUtils
+        .getListOfDatanodeDetails(nodeCount);
+
+    try {
+      try {
+        SCMNodePoolManager npMgr = createNodePoolManager(conf);
+        assertEquals(0, npMgr.getNodePools().size());
+        for (DatanodeDetails node : nodes) {
+          npMgr.addNode(defaultPool, node);
+        }
+        List<DatanodeDetails> nodesRetrieved = npMgr.getNodes(defaultPool);
+        assertEquals(nodeCount, nodesRetrieved.size());
+        assertTwoDatanodeListsEqual(nodes, nodesRetrieved);
+        npMgr.close();
+      } finally {
+        LOG.info("testDefaultNodePoolReload: Finish adding nodes to pool" +
+            " and close.");
+      }
+
+      // try reload with a new NodePoolManager instance
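+      // The new instance is expected to read back the pool membership that
+      // the first instance persisted under the configured metadata directory.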
+      try {
+        SCMNodePoolManager npMgr = createNodePoolManager(conf);
+        List<DatanodeDetails> nodesRetrieved = npMgr.getNodes(defaultPool);
+        assertEquals(nodeCount, nodesRetrieved.size());
+        assertTwoDatanodeListsEqual(nodes, nodesRetrieved);
+      } finally {
+        LOG.info("testDefaultNodePoolReload: Finish reloading node pool.");
+      }
+    } finally {
+      FileUtil.fullyDelete(testDir);
+    }
+  }
+
+  /**
+   * Compare and verify that two datanode lists are equal.
+   * @param list1 - datanode list 1.
+   * @param list2 - datanode list 2.
+   */
+  private void assertTwoDatanodeListsEqual(List<DatanodeDetails> list1,
+      List<DatanodeDetails> list2) {
+    assertEquals(list1.size(), list2.size());
+    Collections.sort(list1);
+    Collections.sort(list2);
+    assertTrue(ListUtils.isEqualList(list1, list2));
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java
new file mode 100644
index 0000000..da05c59
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm;
+/**
+ * SCM tests
+ */
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java
new file mode 100644
index 0000000..d0839c5
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/common/TestEndPoint.java
@@ -0,0 +1,458 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.VersionInfo;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerReport;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.common.states.endpoint
+    .HeartbeatEndpointTask;
+import org.apache.hadoop.ozone.container.common.states.endpoint
+    .RegisterEndpointTask;
+import org.apache.hadoop.ozone.container.common.states.endpoint
+    .VersionEndpointTask;
+import org.apache.hadoop.test.PathUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdds.scm.TestUtils.getDatanodeDetails;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState.states
+    .noContainerReports;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.apache.hadoop.ozone.container.common.ContainerTestUtils
+    .createEndpoint;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
+/**
+ * Tests the endpoints.
+ */
+public class TestEndPoint {
+  private static InetSocketAddress serverAddress;
+  private static RPC.Server scmServer;
+  private static ScmTestMock scmServerImpl;
+  private static File testDir;
+  private static StorageContainerDatanodeProtocolProtos.ReportState
+      defaultReportState;
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    if (scmServer != null) {
+      scmServer.stop();
+    }
+    FileUtil.fullyDelete(testDir);
+  }
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    serverAddress = SCMTestUtils.getReuseableAddress();
+    scmServerImpl = new ScmTestMock();
+    scmServer = SCMTestUtils.startScmRpcServer(SCMTestUtils.getConf(),
+        scmServerImpl, serverAddress, 10);
+    testDir = PathUtils.getTestDir(TestEndPoint.class);
+    defaultReportState = StorageContainerDatanodeProtocolProtos.
+        ReportState.newBuilder().setState(noContainerReports).
+        setCount(0).build();
+  }
+
+  /**
+   * This test asserts that we are able to make a version call to the SCM
+   * server and get back the expected values.
+   */
+  @Test
+  public void testGetVersion() throws Exception {
+    try (EndpointStateMachine rpcEndPoint =
+             createEndpoint(SCMTestUtils.getConf(),
+                 serverAddress, 1000)) {
+      SCMVersionResponseProto responseProto = rpcEndPoint.getEndPoint()
+          .getVersion(null);
+      Assert.assertNotNull(responseProto);
+      Assert.assertEquals(VersionInfo.DESCRIPTION_KEY,
+          responseProto.getKeys(0).getKey());
+      Assert.assertEquals(VersionInfo.getLatestVersion().getDescription(),
+          responseProto.getKeys(0).getValue());
+    }
+  }
+
+  /**
+   * We make the getVersion RPC call, but via the VersionEndpointTask, which
+   * is how the state machine would make the call.
+   */
+  @Test
+  public void testGetVersionTask() throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+    try (EndpointStateMachine rpcEndPoint = createEndpoint(conf,
+        serverAddress, 1000)) {
+      rpcEndPoint.setState(EndpointStateMachine.EndPointStates.GETVERSION);
+      VersionEndpointTask versionTask = new VersionEndpointTask(rpcEndPoint,
+          conf);
+      EndpointStateMachine.EndPointStates newState = versionTask.call();
+
+      // if version call worked the endpoint should automatically move to the
+      // next state.
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.REGISTER,
+          newState);
+
+      // Now rpcEndpoint should remember the version it got from SCM
+      Assert.assertNotNull(rpcEndPoint.getVersion());
+    }
+  }
+
+  /**
+   * This test makes a call to an end point where there is no SCM server. We
+   * expect the versionTask to be able to handle it.
+   */
+  @Test
+  public void testGetVersionToInvalidEndpoint() throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+    InetSocketAddress nonExistentServerAddress = SCMTestUtils
+        .getReuseableAddress();
+    try (EndpointStateMachine rpcEndPoint = createEndpoint(conf,
+        nonExistentServerAddress, 1000)) {
+      rpcEndPoint.setState(EndpointStateMachine.EndPointStates.GETVERSION);
+      VersionEndpointTask versionTask = new VersionEndpointTask(rpcEndPoint,
+          conf);
+      EndpointStateMachine.EndPointStates newState = versionTask.call();
+
+      // This version call did NOT work, so endpoint should remain in the same
+      // state.
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.GETVERSION,
+          newState);
+    }
+  }
+
+  /**
+   * This test makes a getVersion RPC call, but the mock SCM server is going
+   * to respond a little slowly. We assert that we are still in the
+   * GETVERSION state after the timeout.
+   */
+  @Test
+  public void testGetVersionAssertRpcTimeOut() throws Exception {
+    final long rpcTimeout = 1000;
+    final long tolerance = 100;
+    Configuration conf = SCMTestUtils.getConf();
+
+    try (EndpointStateMachine rpcEndPoint = createEndpoint(conf,
+        serverAddress, (int) rpcTimeout)) {
+      rpcEndPoint.setState(EndpointStateMachine.EndPointStates.GETVERSION);
+      VersionEndpointTask versionTask = new VersionEndpointTask(rpcEndPoint,
+          conf);
+
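+      // The mock server delay (1500 ms) exceeds the client rpcTimeout
+      // (1000 ms), so the version call should fail with a timeout and the
+      // endpoint should stay in the GETVERSION state.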
+      scmServerImpl.setRpcResponseDelay(1500);
+      long start = Time.monotonicNow();
+      EndpointStateMachine.EndPointStates newState = versionTask.call();
+      long end = Time.monotonicNow();
+      scmServerImpl.setRpcResponseDelay(0);
+      Assert.assertThat(end - start, lessThanOrEqualTo(rpcTimeout + tolerance));
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.GETVERSION,
+          newState);
+    }
+  }
+
+  @Test
+  public void testRegister() throws Exception {
+    String[] scmAddressArray = new String[1];
+    scmAddressArray[0] = serverAddress.toString();
+    DatanodeDetails nodeToRegister = getDatanodeDetails();
+    try (EndpointStateMachine rpcEndPoint =
+             createEndpoint(
+                 SCMTestUtils.getConf(), serverAddress, 1000)) {
+      SCMRegisteredCmdResponseProto responseProto = rpcEndPoint.getEndPoint()
+          .register(nodeToRegister.getProtoBufMessage(), scmAddressArray);
+      Assert.assertNotNull(responseProto);
+      Assert.assertEquals(nodeToRegister.getUuidString(),
+          responseProto.getDatanodeUUID());
+      Assert.assertNotNull(responseProto.getClusterID());
+    }
+  }
+
+  private EndpointStateMachine registerTaskHelper(InetSocketAddress scmAddress,
+      int rpcTimeout, boolean clearDatanodeDetails) throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+    EndpointStateMachine rpcEndPoint =
+        createEndpoint(conf,
+            scmAddress, rpcTimeout);
+    rpcEndPoint.setState(EndpointStateMachine.EndPointStates.REGISTER);
+    RegisterEndpointTask endpointTask =
+        new RegisterEndpointTask(rpcEndPoint, conf);
+    if (!clearDatanodeDetails) {
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+      endpointTask.setDatanodeDetails(datanodeDetails);
+    }
+    endpointTask.call();
+    return rpcEndPoint;
+  }
+
+  @Test
+  public void testRegisterTask() throws Exception {
+    try (EndpointStateMachine rpcEndpoint =
+             registerTaskHelper(serverAddress, 1000, false)) {
+      // Successful register should move us to Heartbeat state.
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.HEARTBEAT,
+          rpcEndpoint.getState());
+    }
+  }
+
+  @Test
+  public void testRegisterToInvalidEndpoint() throws Exception {
+    InetSocketAddress address = SCMTestUtils.getReuseableAddress();
+    try (EndpointStateMachine rpcEndpoint =
+             registerTaskHelper(address, 1000, false)) {
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.REGISTER,
+          rpcEndpoint.getState());
+    }
+  }
+
+  @Test
+  public void testRegisterNoContainerID() throws Exception {
+    InetSocketAddress address = SCMTestUtils.getReuseableAddress();
+    try (EndpointStateMachine rpcEndpoint =
+             registerTaskHelper(address, 1000, true)) {
+      // No Container ID, therefore we tell the datanode that we would like to
+      // shutdown.
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.SHUTDOWN,
+          rpcEndpoint.getState());
+    }
+  }
+
+  @Test
+  public void testRegisterRpcTimeout() throws Exception {
+    final long rpcTimeout = 1000;
+    final long tolerance = 200;
+    scmServerImpl.setRpcResponseDelay(1500);
+    long start = Time.monotonicNow();
+    registerTaskHelper(serverAddress, 1000, false).close();
+    long end = Time.monotonicNow();
+    scmServerImpl.setRpcResponseDelay(0);
+    Assert.assertThat(end - start, lessThanOrEqualTo(rpcTimeout + tolerance));
+  }
+
+  @Test
+  public void testHeartbeat() throws Exception {
+    DatanodeDetails dataNode = getDatanodeDetails();
+    try (EndpointStateMachine rpcEndPoint =
+             createEndpoint(SCMTestUtils.getConf(),
+                 serverAddress, 1000)) {
+      SCMNodeReport.Builder nrb = SCMNodeReport.newBuilder();
+      SCMStorageReport.Builder srb = SCMStorageReport.newBuilder();
+      srb.setStorageUuid(UUID.randomUUID().toString());
+      srb.setCapacity(2000).setScmUsed(500).setRemaining(1500).build();
+      nrb.addStorageReport(srb);
+      SCMHeartbeatResponseProto responseProto = rpcEndPoint.getEndPoint()
+          .sendHeartbeat(
+              dataNode.getProtoBufMessage(), nrb.build(), defaultReportState);
+      Assert.assertNotNull(responseProto);
+      Assert.assertEquals(0, responseProto.getCommandsCount());
+    }
+  }
+
+  private void heartbeatTaskHelper(InetSocketAddress scmAddress,
+      int rpcTimeout) throws Exception {
+    Configuration conf = SCMTestUtils.getConf();
+    conf.set(DFS_DATANODE_DATA_DIR_KEY, testDir.getAbsolutePath());
+    conf.set(OZONE_METADATA_DIRS, testDir.getAbsolutePath());
+    // Mini Ozone cluster will not come up if the random port flag is not set
+    // to true, since Ratis will exit if the server port cannot be bound. We
+    // can remove this hard coding once we fix the Ratis default behaviour.
+    conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT, true);
+
+
+    // Create a datanode state machine for the stateContext used by the
+    // endpoint task.
+    try (DatanodeStateMachine stateMachine = new DatanodeStateMachine(
+        TestUtils.getDatanodeDetails(), conf);
+        EndpointStateMachine rpcEndPoint =
+            createEndpoint(conf, scmAddress, rpcTimeout)) {
+      HddsProtos.DatanodeDetailsProto datanodeDetailsProto =
+          getDatanodeDetails().getProtoBufMessage();
+      rpcEndPoint.setState(EndpointStateMachine.EndPointStates.HEARTBEAT);
+
+      final StateContext stateContext =
+          new StateContext(conf, DatanodeStateMachine.DatanodeStates.RUNNING,
+              stateMachine);
+
+      HeartbeatEndpointTask endpointTask =
+          new HeartbeatEndpointTask(rpcEndPoint, conf, stateContext);
+      endpointTask.setDatanodeDetailsProto(datanodeDetailsProto);
+      endpointTask.call();
+      Assert.assertNotNull(endpointTask.getDatanodeDetailsProto());
+
+      Assert.assertEquals(EndpointStateMachine.EndPointStates.HEARTBEAT,
+          rpcEndPoint.getState());
+    }
+  }
+
+  @Test
+  public void testHeartbeatTask() throws Exception {
+    heartbeatTaskHelper(serverAddress, 1000);
+  }
+
+  @Test
+  public void testHeartbeatTaskToInvalidNode() throws Exception {
+    InetSocketAddress invalidAddress = SCMTestUtils.getReuseableAddress();
+    heartbeatTaskHelper(invalidAddress, 1000);
+  }
+
+  @Test
+  public void testHeartbeatTaskRpcTimeOut() throws Exception {
+    final long rpcTimeout = 1000;
+    final long tolerance = 200;
+    scmServerImpl.setRpcResponseDelay(1500);
+    long start = Time.monotonicNow();
+    InetSocketAddress invalidAddress = SCMTestUtils.getReuseableAddress();
+    heartbeatTaskHelper(invalidAddress, 1000);
+    long end = Time.monotonicNow();
+    scmServerImpl.setRpcResponseDelay(0);
+    Assert.assertThat(end - start,
+        lessThanOrEqualTo(rpcTimeout + tolerance));
+  }
+
+  /**
+   * Returns a new container report.
+   * @return a new ContainerReport with random values.
+   */
+  ContainerReport getRandomContainerReport() {
+    return new ContainerReport(UUID.randomUUID().toString(),
+        DigestUtils.sha256Hex("Random"));
+  }
+
+  /**
+   * Creates dummy container reports.
+   * @param count - The number of closed containers to create.
+   * @return ContainerReportsRequestProto
+   */
+  StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto
+      createDummyContainerReports(int count) {
+    StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto.Builder
+        reportsBuilder = StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.newBuilder();
+    for (int x = 0; x < count; x++) {
+      reportsBuilder.addReports(getRandomContainerReport()
+          .getProtoBufMessage());
+    }
+    reportsBuilder.setDatanodeDetails(getDatanodeDetails()
+        .getProtoBufMessage());
+    reportsBuilder.setType(StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.reportType.fullReport);
+    return reportsBuilder.build();
+  }
+
+  /**
+   * Tests that rpcEndpoint sendContainerReport works as expected.
+   * @throws Exception
+   */
+  @Test
+  public void testContainerReportSend() throws Exception {
+    final int count = 1000;
+    scmServerImpl.reset();
+    try (EndpointStateMachine rpcEndPoint =
+             createEndpoint(SCMTestUtils.getConf(),
+                 serverAddress, 1000)) {
+      ContainerReportsResponseProto responseProto = rpcEndPoint
+          .getEndPoint().sendContainerReport(createDummyContainerReports(
+              count));
+      Assert.assertNotNull(responseProto);
+    }
+    Assert.assertEquals(1, scmServerImpl.getContainerReportsCount());
+    Assert.assertEquals(count, scmServerImpl.getContainerCount());
+  }
+
+
+  /**
+   * Tests that rpcEndpoint sendContainerReport also aggregates key counts
+   * and bytes used as expected.
+   * @throws Exception
+   */
+  @Test
+  public void testContainerReport() throws Exception {
+    final int count = 1000;
+    scmServerImpl.reset();
+    try (EndpointStateMachine rpcEndPoint =
+             createEndpoint(SCMTestUtils.getConf(),
+                 serverAddress, 1000)) {
+      ContainerReportsResponseProto responseProto = rpcEndPoint
+          .getEndPoint().sendContainerReport(createContainerReport(count));
+      Assert.assertNotNull(responseProto);
+    }
+    Assert.assertEquals(1, scmServerImpl.getContainerReportsCount());
+    Assert.assertEquals(count, scmServerImpl.getContainerCount());
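+    // createContainerReport sets keyCount to 1000 and bytesUsed to 2 GB for
+    // every simulated container, so the totals recorded by the mock server
+    // should scale linearly with the report count.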
+    final long expectedKeyCount = count * 1000;
+    Assert.assertEquals(expectedKeyCount, scmServerImpl.getKeyCount());
+    final long expectedBytesUsed = count * OzoneConsts.GB * 2;
+    Assert.assertEquals(expectedBytesUsed, scmServerImpl.getBytesUsed());
+  }
+
+  private ContainerReportsRequestProto createContainerReport(int count) {
+    StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto.Builder
+        reportsBuilder = StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.newBuilder();
+    for (int x = 0; x < count; x++) {
+      ContainerReport report = new ContainerReport(UUID.randomUUID().toString(),
+            DigestUtils.sha256Hex("Simulated"));
+      report.setKeyCount(1000);
+      report.setSize(OzoneConsts.GB * 5);
+      report.setBytesUsed(OzoneConsts.GB * 2);
+      report.setReadCount(100);
+      report.setReadBytes(OzoneConsts.GB * 1);
+      report.setWriteCount(50);
+      report.setWriteBytes(OzoneConsts.GB * 2);
+      report.setContainerID(1);
+
+      reportsBuilder.addReports(report.getProtoBufMessage());
+    }
+    reportsBuilder.setDatanodeDetails(getDatanodeDetails()
+        .getProtoBufMessage());
+    reportsBuilder.setType(StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.reportType.fullReport);
+    return reportsBuilder.build();
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java
new file mode 100644
index 0000000..651b776
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestContainerPlacement.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.placement;
+
+import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.container.MockNodeManager;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms
+    .SCMContainerPlacementRandom;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.List;
+import java.util.Random;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Asserts that allocation strategy works as expected.
+ */
+public class TestContainerPlacement {
+
+  private DescriptiveStatistics computeStatistics(NodeManager nodeManager) {
+    DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
+    for (DatanodeDetails dd : nodeManager.getNodes(HEALTHY)) {
+      float weightedValue =
+          nodeManager.getNodeStat(dd).get().getScmUsed().get() / (float)
+              nodeManager.getNodeStat(dd).get().getCapacity().get();
+      descriptiveStatistics.addValue(weightedValue);
+    }
+    return descriptiveStatistics;
+  }
+
+  /**
+   * This test simulates lots of Cluster I/O and updates the metadata in SCM.
+   * We simulate adding and removing containers from the cluster. It asserts
+   * that our placement algorithm has taken the capacity of nodes into
+   * consideration by asserting that the standard deviation of used space on
+   * these nodes has improved.
+   */
+  @Test
+  public void testCapacityPlacementYieldsBetterDataDistribution() throws
+      SCMException {
+    final int opsCount = 200 * 1000;
+    final int nodesRequired = 3;
+    Random random = new Random();
+
+    // The nature of init code in MockNodeManager yields similar clusters.
+    MockNodeManager nodeManagerCapacity = new MockNodeManager(true, 100);
+    MockNodeManager nodeManagerRandom = new MockNodeManager(true, 100);
+    DescriptiveStatistics beforeCapacity =
+        computeStatistics(nodeManagerCapacity);
+    DescriptiveStatistics beforeRandom = computeStatistics(nodeManagerRandom);
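+
+    // computeStatistics returns the distribution of used/capacity ratios
+    // across healthy nodes; a lower standard deviation below means data is
+    // spread more evenly across the cluster.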
+
+    // Assert that the initial layouts of the two clusters are similar.
+    assertEquals(beforeCapacity.getStandardDeviation(), beforeRandom
+        .getStandardDeviation(), 0.001);
+
+    SCMContainerPlacementCapacity capacityPlacer = new
+        SCMContainerPlacementCapacity(nodeManagerCapacity, new Configuration());
+    SCMContainerPlacementRandom randomPlacer = new
+        SCMContainerPlacementRandom(nodeManagerRandom, new Configuration());
+
+    for (int x = 0; x < opsCount; x++) {
+      long containerSize = random.nextInt(100) * OzoneConsts.GB;
+      List<DatanodeDetails> nodesCapacity =
+          capacityPlacer.chooseDatanodes(nodesRequired, containerSize);
+      assertEquals(nodesRequired, nodesCapacity.size());
+
+      List<DatanodeDetails> nodesRandom =
+          randomPlacer.chooseDatanodes(nodesRequired, containerSize);
+
+      // One fifth of all calls are deletes.
+      if (x % 5 == 0) {
+        deleteContainer(nodeManagerCapacity, nodesCapacity, containerSize);
+        deleteContainer(nodeManagerRandom, nodesRandom, containerSize);
+      } else {
+        createContainer(nodeManagerCapacity, nodesCapacity, containerSize);
+        createContainer(nodeManagerRandom, nodesRandom, containerSize);
+      }
+    }
+    DescriptiveStatistics postCapacity = computeStatistics(nodeManagerCapacity);
+    DescriptiveStatistics postRandom = computeStatistics(nodeManagerRandom);
+
+    // This is a very bold claim and needs a large number of I/O operations.
+    // The claim in this assertion is that we improved the data distribution
+    // of this cluster in relation to the start state of the cluster.
+    Assert.assertTrue(beforeCapacity.getStandardDeviation() >
+        postCapacity.getStandardDeviation());
+
+    // This asserts that Capacity placement yields a better placement
+    // algorithm than random placement, since both clusters started in an
+    // identical state.
+
+    Assert.assertTrue(postRandom.getStandardDeviation() >
+        postCapacity.getStandardDeviation());
+  }
+
+  private void deleteContainer(MockNodeManager nodeManager,
+      List<DatanodeDetails> nodes, long containerSize) {
+    for (DatanodeDetails dd : nodes) {
+      nodeManager.delContainer(dd, containerSize);
+    }
+  }
+
+  private void createContainer(MockNodeManager nodeManager,
+      List<DatanodeDetails> nodes, long containerSize) {
+    for (DatanodeDetails dd : nodes) {
+      nodeManager.addContainer(dd, containerSize);
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java
new file mode 100644
index 0000000..7150d1b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/placement/TestDatanodeMetrics.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.placement;
+
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for the metrics that support placement.
+ */
+public class TestDatanodeMetrics {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+  @Test
+  public void testSCMNodeMetric() {
+    SCMNodeStat stat = new SCMNodeStat(100L, 10L, 90L);
+    assertEquals((long) stat.getCapacity().get(), 100L);
+    assertEquals((long) stat.getScmUsed().get(), 10L);
+    assertEquals((long) stat.getRemaining().get(), 90L);
+    SCMNodeMetric metric = new SCMNodeMetric(stat);
+
+    SCMNodeStat newStat = new SCMNodeStat(100L, 10L, 90L);
+    assertEquals((long) newStat.getCapacity().get(), 100L);
+    assertEquals((long) newStat.getScmUsed().get(), 10L);
+    assertEquals((long) newStat.getRemaining().get(), 90L);
+
+    SCMNodeMetric newMetric = new SCMNodeMetric(newStat);
+    assertTrue(metric.isEqual(newMetric.get()));
+
+    newMetric.add(stat);
+    assertTrue(newMetric.isGreater(metric.get()));
+
+    SCMNodeMetric zeroMetric = new SCMNodeMetric(new SCMNodeStat());
+    // Assert we can handle zero capacity.
+    assertTrue(metric.isGreater(zeroMetric.get()));
+
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java
new file mode 100644
index 0000000..8eb07e6
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/TestContainerSupervisor.java
@@ -0,0 +1,272 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.replication;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.container.replication.ContainerSupervisor;
+import org.apache.hadoop.hdds.scm.container.replication.InProgressPool;
+import org.apache.hadoop.hdds.scm.node.CommandQueue;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.ozone.container.common.SCMTestUtils;
+import org.apache.hadoop.ozone.container.testutils
+    .ReplicationDatanodeStateManager;
+import org.apache.hadoop.ozone.container.testutils.ReplicationNodeManagerMock;
+import org.apache.hadoop.ozone.container.testutils
+    .ReplicationNodePoolManagerMock;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.event.Level;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.ratis.shaded.com.google.common.util.concurrent
+    .Uninterruptibles.sleepUninterruptibly;
+
+/**
+ * Tests for the container supervisor.
+ */
+public class TestContainerSupervisor {
+  final static String POOL_NAME_TEMPLATE = "Pool%d";
+  static final int MAX_DATANODES = 72;
+  static final int POOL_SIZE = 24;
+  static final int POOL_COUNT = 3;
+  private LogCapturer logCapturer = LogCapturer.captureLogs(
+      LogFactory.getLog(ContainerSupervisor.class));
+  private List<DatanodeDetails> datanodes = new LinkedList<>();
+  private NodeManager nodeManager;
+  private NodePoolManager poolManager;
+  private CommandQueue commandQueue;
+  private ContainerSupervisor containerSupervisor;
+  private ReplicationDatanodeStateManager datanodeStateManager;
+
+  @After
+  public void tearDown() throws Exception {
+    logCapturer.stopCapturing();
+    GenericTestUtils.setLogLevel(ContainerSupervisor.LOG, Level.INFO);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    GenericTestUtils.setLogLevel(ContainerSupervisor.LOG, Level.DEBUG);
+    Map<DatanodeDetails, NodeState> nodeStateMap = new HashMap<>();
+    // We are setting up 3 pools with 24 nodes each in this cluster.
+    // First we create 72 Datanodes.
+    for (int x = 0; x < MAX_DATANODES; x++) {
+      DatanodeDetails datanode = TestUtils.getDatanodeDetails();
+      datanodes.add(datanode);
+      nodeStateMap.put(datanode, HEALTHY);
+    }
+
+    commandQueue = new CommandQueue();
+
+    // All nodes in this cluster are healthy for time being.
+    nodeManager = new ReplicationNodeManagerMock(nodeStateMap, commandQueue);
+    poolManager = new ReplicationNodePoolManagerMock();
+
+
+    Assert.assertEquals("Max datanodes should be equal to POOL_SIZE * " +
+        "POOL_COUNT", POOL_COUNT * POOL_SIZE, MAX_DATANODES);
+
+    // Start from 1 so the pool names read Pool1..Pool3; the node index is
+    // computed so that every datanode lands in exactly one pool.
+    for (int y = 1; y <= POOL_COUNT; y++) {
+      String poolName = String.format(POOL_NAME_TEMPLATE, y);
+      for (int z = 0; z < POOL_SIZE; z++) {
+        DatanodeDetails id = datanodes.get((y - 1) * POOL_SIZE + z);
+        poolManager.addNode(poolName, id);
+      }
+    }
+    OzoneConfiguration config = SCMTestUtils.getOzoneConf();
+    config.setTimeDuration(OZONE_SCM_CONTAINER_REPORTS_WAIT_TIMEOUT, 2,
+        TimeUnit.SECONDS);
+    config.setTimeDuration(OZONE_SCM_CONTAINER_REPORT_PROCESSING_INTERVAL, 1,
+        TimeUnit.SECONDS);
+    containerSupervisor = new ContainerSupervisor(config,
+        nodeManager, poolManager);
+    datanodeStateManager = new ReplicationDatanodeStateManager(nodeManager,
+        poolManager);
+    // Sleep for one second to make sure all threads get time to run.
+    sleepUninterruptibly(1, TimeUnit.SECONDS);
+  }
+
+  @Test
+  /**
+   * Asserts that at least one pool is picked up for processing.
+   */
+  public void testAssertPoolsAreProcessed() {
+    // This asserts that replication manager has started processing at least
+    // one pool.
+    Assert.assertTrue(containerSupervisor.getInProgressPoolCount() > 0);
+
+    // Since all datanodes are flagged as healthy in this test, for each
+    // datanode we must have queued a command.
+    Assert.assertEquals("Commands are in queue :",
+        POOL_SIZE * containerSupervisor.getInProgressPoolCount(),
+        commandQueue.getCommandsInQueue());
+  }
+
+  @Test
+  /**
+   * This test sends container reports for 2 containers to a pool in progress.
+   * Asserts that we are able to find the container with a single replica and
+   * do not find the container with 3 replicas.
+   */
+  public void testDetectSingleContainerReplica() throws TimeoutException,
+      InterruptedException {
+    String singleNodeContainer = "SingleNodeContainer";
+    String threeNodeContainer = "ThreeNodeContainer";
+    InProgressPool ppool = containerSupervisor.getInProcessPoolList().get(0);
+    // Only single datanode reporting that "SingleNodeContainer" exists.
+    List<ContainerReportsRequestProto> clist =
+        datanodeStateManager.getContainerReport(singleNodeContainer,
+            ppool.getPool().getPoolName(), 1);
+    ppool.handleContainerReport(clist.get(0));
+
+    // Three nodes are going to report that ThreeNodeContainer exists.
+    clist = datanodeStateManager.getContainerReport(threeNodeContainer,
+        ppool.getPool().getPoolName(), 3);
+
+    for (ContainerReportsRequestProto reportsProto : clist) {
+      ppool.handleContainerReport(reportsProto);
+    }
+    GenericTestUtils.waitFor(() -> ppool.getContainerProcessedCount() == 4,
+        200, 1000);
+    ppool.setDoneProcessing();
+
+    List<Map.Entry<String, Integer>> containers = ppool.filterContainer(p -> p
+        .getValue() == 1);
+    Assert.assertEquals(singleNodeContainer, containers.get(0).getKey());
+    int count = containers.get(0).getValue();
+    Assert.assertEquals(1L, count);
+  }
+
+  @Test
+  /**
+   * We create three containers: Normal, OverReplicated and WayOverReplicated.
+   * This test asserts that we are able to find the over-replicated
+   * containers.
+   */
+  public void testDetectOverReplica() throws TimeoutException,
+      InterruptedException {
+    String normalContainer = "NormalContainer";
+    String overReplicated = "OverReplicatedContainer";
+    String wayOverReplicated = "WayOverReplicated";
+    InProgressPool ppool = containerSupervisor.getInProcessPoolList().get(0);
+
+    List<ContainerReportsRequestProto> clist =
+        datanodeStateManager.getContainerReport(normalContainer,
+            ppool.getPool().getPoolName(), 3);
+    ppool.handleContainerReport(clist.get(0));
+
+    clist = datanodeStateManager.getContainerReport(overReplicated,
+        ppool.getPool().getPoolName(), 4);
+
+    for (ContainerReportsRequestProto reportsProto : clist) {
+      ppool.handleContainerReport(reportsProto);
+    }
+
+    clist = datanodeStateManager.getContainerReport(wayOverReplicated,
+        ppool.getPool().getPoolName(), 7);
+
+    for (ContainerReportsRequestProto reportsProto : clist) {
+      ppool.handleContainerReport(reportsProto);
+    }
+
+    // We ignore container reports from the same datanode.
+    // It is possible that each of these containers gets placed on the same
+    // datanodes, so we allow for 4 duplicates in the set of 14 reports.
+    GenericTestUtils.waitFor(() -> ppool.getContainerProcessedCount() > 10,
+        200, 1000);
+    ppool.setDoneProcessing();
+
+    List<Map.Entry<String, Integer>> containers = ppool.filterContainer(p -> p
+        .getValue() > 3);
+    Assert.assertEquals(2, containers.size());
+  }
+
+  @Test
+  /**
+   * This test verifies that all pools are picked up for replica processing.
+   *
+   */
+  public void testAllPoolsAreProcessed() throws TimeoutException,
+      InterruptedException {
+    // Verify that we saw all three pools being picked up for processing.
+    GenericTestUtils.waitFor(() -> containerSupervisor.getPoolProcessCount()
+        >= 3, 200, 15 * 1000);
+    Assert.assertTrue(logCapturer.getOutput().contains("Pool1") &&
+        logCapturer.getOutput().contains("Pool2") &&
+        logCapturer.getOutput().contains("Pool3"));
+  }
+
+  @Test
+  /**
+   * Adds a new pool and tests that we are able to pick up that new pool for
+   * processing as well as handle container reports for datanodes in that pool.
+   * @throws TimeoutException
+   * @throws InterruptedException
+   */
+  public void testAddingNewPoolWorks()
+      throws TimeoutException, InterruptedException, IOException {
+    LogCapturer inProgressLog = LogCapturer.captureLogs(
+        LogFactory.getLog(InProgressPool.class));
+    GenericTestUtils.setLogLevel(InProgressPool.LOG, Level.DEBUG);
+    try {
+      DatanodeDetails id = TestUtils.getDatanodeDetails();
+      ((ReplicationNodeManagerMock) (nodeManager)).addNode(id, HEALTHY);
+      poolManager.addNode("PoolNew", id);
+      GenericTestUtils.waitFor(() ->
+              logCapturer.getOutput().contains("PoolNew"),
+          200, 15 * 1000);
+
+      // Assert that we are able to send a container report to this new
+      // pool and datanode.
+      List<ContainerReportsRequestProto> clist =
+          datanodeStateManager.getContainerReport("NewContainer1",
+              "PoolNew", 1);
+      containerSupervisor.handleContainerReport(clist.get(0));
+      GenericTestUtils.waitFor(() ->
+          inProgressLog.getOutput().contains("NewContainer1") && inProgressLog
+              .getOutput().contains(id.getUuidString()),
+          200, 10 * 1000);
+    } finally {
+      inProgressLog.stopCapturing();
+    }
+  }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java
new file mode 100644
index 0000000..318c54d
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/replication/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.replication;
+// Test classes for replication.
\ No newline at end of file
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java
new file mode 100644
index 0000000..26f3514
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationDatanodeStateManager.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerInfo;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+
+/**
+ * This class manages the state of datanodes
+ * in conjunction with the node pool and node managers.
+ */
+public class ReplicationDatanodeStateManager {
+  private final NodeManager nodeManager;
+  private final NodePoolManager poolManager;
+  private final Random r;
+
+  /**
+   * Constructs the datanode state manager.
+   *
+   * @param nodeManager - node manager tracking datanode health.
+   * @param poolManager - node pool manager tracking pool membership.
+   */
+  public ReplicationDatanodeStateManager(NodeManager nodeManager,
+      NodePoolManager poolManager) {
+    this.nodeManager = nodeManager;
+    this.poolManager = poolManager;
+    r = new Random();
+  }
+
+  /**
+   * Get Container Report as if it is from a datanode in the cluster.
+   * @param containerName - Container Name.
+   * @param poolName - Pool Name.
+   * @param dataNodeCount - Datanode Count.
+   * @return List of Container Reports.
+   */
+  public List<ContainerReportsRequestProto> getContainerReport(
+      String containerName, String poolName, int dataNodeCount) {
+    List<ContainerReportsRequestProto> containerList = new LinkedList<>();
+    List<DatanodeDetails> nodesInPool = poolManager.getNodes(poolName);
+
+    if (nodesInPool == null) {
+      return containerList;
+    }
+
+    if (nodesInPool.size() < dataNodeCount) {
+      throw new IllegalStateException("Not enough datanodes to create " +
+          "required container reports");
+    }
+
+    int containerID = 1;
+    while (containerList.size() < dataNodeCount && nodesInPool.size() > 0) {
+      DatanodeDetails id = nodesInPool.get(r.nextInt(nodesInPool.size()));
+      nodesInPool.remove(id);
+      containerID++;
+      // We return container reports only for nodes that are healthy.
+      if (nodeManager.getNodeState(id) == HEALTHY) {
+        ContainerInfo info = ContainerInfo.newBuilder()
+            .setContainerName(containerName)
+            .setFinalhash(DigestUtils.sha256Hex(containerName))
+            .setContainerID(containerID)
+            .build();
+        ContainerReportsRequestProto containerReport =
+            ContainerReportsRequestProto.newBuilder().addReports(info)
+            .setDatanodeDetails(id.getProtoBufMessage())
+            .setType(ContainerReportsRequestProto.reportType.fullReport)
+            .build();
+        containerList.add(containerReport);
+      }
+    }
+    return containerList;
+  }
+}
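For context, a condensed sketch of how this helper is typically driven (mirroring TestContainerSupervisor above; the pool and container names are illustrative):

    // Fabricate reports for one container from 3 healthy datanodes in Pool1
    // and feed them to the pool that is currently being processed.
    List<ContainerReportsRequestProto> reports =
        datanodeStateManager.getContainerReport("Container1", "Pool1", 3);
    for (ContainerReportsRequestProto report : reports) {
      inProgressPool.handleContainerReport(report);
    }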
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java
new file mode 100644
index 0000000..f2db751
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodeManagerMock.java
@@ -0,0 +1,326 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeMetric;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
+import org.apache.hadoop.hdds.scm.node.CommandQueue;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto;
+import org.apache.hadoop.ozone.protocol.VersionResponse;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.mockito.Mockito;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * A Node Manager to test replication.
+ */
+public class ReplicationNodeManagerMock implements NodeManager {
+  private final Map<DatanodeDetails, NodeState> nodeStateMap;
+  private final CommandQueue commandQueue;
+
+  /**
+   * Creates a node manager mock from a map of datanodes to their current
+   * states.
+   * @param nodeState A node state map.
+   * @param commandQueue The queue that addDatanodeCommand writes to.
+   */
+  public ReplicationNodeManagerMock(Map<DatanodeDetails, NodeState> nodeState,
+                                    CommandQueue commandQueue) {
+    Preconditions.checkNotNull(nodeState);
+    this.nodeStateMap = nodeState;
+    this.commandQueue = commandQueue;
+  }
+
+  /**
+   * Get the minimum number of nodes to get out of chill mode.
+   *
+   * @return int
+   */
+  @Override
+  public int getMinimumChillModeNodes() {
+    return 0;
+  }
+
+  /**
+   * Returns a chill mode status string.
+   *
+   * @return String
+   */
+  @Override
+  public String getChillModeStatus() {
+    return null;
+  }
+
+  /**
+   * Get the number of data nodes in each state.
+   *
+   * @return A mapping from node state to the number of nodes in that state.
+   */
+  @Override
+  public Map<String, Integer> getNodeCount() {
+    return null;
+  }
+
+  /**
+   * Removes a data node from the management of this Node Manager.
+   *
+   * @param node - DataNode.
+   * @throws UnregisteredNodeException
+   */
+  @Override
+  public void removeNode(DatanodeDetails node)
+      throws UnregisteredNodeException {
+    nodeStateMap.remove(node);
+
+  }
+
+  /**
+   * Gets all Live Datanodes that are currently communicating with SCM.
+   *
+   * @param nodestate - State of the node
+   * @return List of Datanodes that are Heartbeating SCM.
+   */
+  @Override
+  public List<DatanodeDetails> getNodes(NodeState nodestate) {
+    return null;
+  }
+
+  /**
+   * Returns the Number of Datanodes that are communicating with SCM.
+   *
+   * @param nodestate - State of the node
+   * @return int -- count
+   */
+  @Override
+  public int getNodeCount(NodeState nodestate) {
+    return 0;
+  }
+
+  /**
+   * Get all datanodes known to SCM.
+   *
+   * @return List of DatanodeDetails known to SCM.
+   */
+  @Override
+  public List<DatanodeDetails> getAllNodes() {
+    return null;
+  }
+
+  /**
+   * Chill mode is the period when node manager waits for a minimum
+   * configured number of datanodes to report in. This is called chill mode
+   * to indicate the period before node manager gets into action.
+   * <p>
+   * Forcefully exits the chill mode, even if we have not met the minimum
+   * criteria of the nodes reporting in.
+   */
+  @Override
+  public void forceExitChillMode() {
+
+  }
+
+  /**
+   * Puts the node manager into manual chill mode.
+   */
+  @Override
+  public void enterChillMode() {
+
+  }
+
+  /**
+   * Brings node manager out of manual chill mode.
+   */
+  @Override
+  public void exitChillMode() {
+
+  }
+
+  /**
+   * Returns true if node manager is out of chill mode, else false.
+   * @return true if out of chill mode, else false
+   */
+  @Override
+  public boolean isOutOfChillMode() {
+    return !nodeStateMap.isEmpty();
+  }
+
+  /**
+   * Returns the aggregated node stats.
+   *
+   * @return the aggregated node stats.
+   */
+  @Override
+  public SCMNodeStat getStats() {
+    return null;
+  }
+
+  /**
+   * Return a map of node stats.
+   *
+   * @return a map of individual node stats (live/stale but not dead).
+   */
+  @Override
+  public Map<UUID, SCMNodeStat> getNodeStats() {
+    return null;
+  }
+
+  /**
+   * Return the node stat of the specified datanode.
+   *
+   * @param dd - datanode details.
+   * @return node stat if it is live/stale, null if it is dead or doesn't exist.
+   */
+  @Override
+  public SCMNodeMetric getNodeStat(DatanodeDetails dd) {
+    return null;
+  }
+
+  @Override
+  public NodePoolManager getNodePoolManager() {
+    return Mockito.mock(NodePoolManager.class);
+  }
+
+  /**
+   * Wait until the heartbeat is processed by the NodeManager.
+   *
+   * @return true if heartbeat has been processed.
+   */
+  @Override
+  public boolean waitForHeartbeatProcessed() {
+    return false;
+  }
+
+  /**
+   * Returns the node state of a specific node.
+   *
+   * @param dd - DatanodeDetails
+   * @return Healthy/Stale/Dead.
+   */
+  @Override
+  public NodeState getNodeState(DatanodeDetails dd) {
+    return nodeStateMap.get(dd);
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated
+   * with it. If the stream is already closed then invoking this
+   * method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the
+   * close may fail require careful attention. It is strongly advised
+   * to relinquish the underlying resources and to internally
+   * <em>mark</em> the {@code Closeable} as closed, prior to throwing
+   * the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+
+  }
+
+  /**
+   * When an object implementing interface <code>Runnable</code> is used
+   * to create a thread, starting the thread causes the object's
+   * <code>run</code> method to be called in that separately executing
+   * thread.
+   * <p>
+   * The general contract of the method <code>run</code> is that it may
+   * take any action whatsoever.
+   *
+   * @see Thread#run()
+   */
+  @Override
+  public void run() {
+
+  }
+
+  /**
+   * Gets the version info from SCM.
+   *
+   * @param versionRequest - version Request.
+   * @return - SCM version info and other information needed by the
+   * datanode.
+   */
+  @Override
+  public VersionResponse getVersion(SCMVersionRequestProto versionRequest) {
+    return null;
+  }
+
+  /**
+   * Register the node if the node finds that it is not registered with any SCM.
+   *
+   * @param dd DatanodeDetailsProto
+   *
+   * @return SCMHeartbeatResponseProto
+   */
+  @Override
+  public SCMCommand register(HddsProtos.DatanodeDetailsProto dd) {
+    return null;
+  }
+
+  /**
+   * Send heartbeat to indicate the datanode is alive and doing well.
+   *
+   * @param dd - Datanode Details.
+   * @param nodeReport - node report.
+   * @param containerReportState - container report state.
+   * @return SCM heartbeat response list.
+   */
+  @Override
+  public List<SCMCommand> sendHeartbeat(HddsProtos.DatanodeDetailsProto dd,
+      SCMNodeReport nodeReport, ReportState containerReportState) {
+    return null;
+  }
+
+  /**
+   * Clears all nodes from the node Manager.
+   */
+  public void clearMap() {
+    this.nodeStateMap.clear();
+  }
+
+  /**
+   * Adds a node to the existing Node manager. This is used only for test
+   * purposes.
+   * @param id DatanodeDetails
+   * @param state State you want to put that node to.
+   */
+  public void addNode(DatanodeDetails id, NodeState state) {
+    nodeStateMap.put(id, state);
+  }
+
+  @Override
+  public void addDatanodeCommand(UUID dnId, SCMCommand command) {
+    this.commandQueue.addCommand(dnId, command);
+  }
+
+}
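A short usage sketch of this mock, under the same assumptions the tests above make (TestUtils supplies the datanode identity; every call shown is defined in this file):

    Map<DatanodeDetails, NodeState> states = new HashMap<>();
    ReplicationNodeManagerMock nm =
        new ReplicationNodeManagerMock(states, new CommandQueue());

    DatanodeDetails dn = TestUtils.getDatanodeDetails();
    nm.addNode(dn, NodeState.HEALTHY);          // register a healthy node
    assert nm.getNodeState(dn) == NodeState.HEALTHY;
    nm.clearMap();                              // drop all tracked nodes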
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java
new file mode 100644
index 0000000..ffcd752
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/ReplicationNodePoolManagerMock.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.node.NodePoolManager;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Pool Manager replication mock.
+ */
+public class ReplicationNodePoolManagerMock implements NodePoolManager {
+
+  private final Map<DatanodeDetails, String> nodeMemberShip;
+
+  /**
+   * A node pool manager for testing.
+   */
+  public ReplicationNodePoolManagerMock() {
+    nodeMemberShip = new HashMap<>();
+  }
+
+  /**
+   * Add a node to a node pool.
+   *
+   * @param pool - name of the node pool.
+   * @param node - data node.
+   */
+  @Override
+  public void addNode(String pool, DatanodeDetails node) {
+    nodeMemberShip.put(node, pool);
+  }
+
+  /**
+   * Remove a node from a node pool.
+   *
+   * @param pool - name of the node pool.
+   * @param node - data node.
+   * @throws SCMException
+   */
+  @Override
+  public void removeNode(String pool, DatanodeDetails node)
+      throws SCMException {
+    nodeMemberShip.remove(node);
+
+  }
+
+  /**
+   * Get a list of known node pools.
+   *
+   * @return a list of known node pool names or an empty list if no node pool
+   * is defined.
+   */
+  @Override
+  public List<String> getNodePools() {
+    Set<String> poolSet = new HashSet<>();
+    for (Map.Entry<DatanodeDetails, String> entry : nodeMemberShip.entrySet()) {
+      poolSet.add(entry.getValue());
+    }
+    return new ArrayList<>(poolSet);
+
+  }
+
+  /**
+   * Get all nodes of a node pool given the name of the node pool.
+   *
+   * @param pool - name of the node pool.
+   * @return a list of datanode ids or an empty list if the node pool was not
+   * found.
+   */
+  @Override
+  public List<DatanodeDetails> getNodes(String pool) {
+    Set<DatanodeDetails> datanodeSet = new HashSet<>();
+    for (Map.Entry<DatanodeDetails, String> entry : nodeMemberShip.entrySet()) {
+      if (entry.getValue().equals(pool)) {
+        datanodeSet.add(entry.getKey());
+      }
+    }
+    return new ArrayList<>(datanodeSet);
+  }
+
+  /**
+   * Get the node pool name if the node has been added to a node pool.
+   *
+   * @param datanodeDetails DatanodeDetails.
+   * @return node pool name if it has been assigned. null if the node has not
+   * been assigned to any node pool yet.
+   */
+  @Override
+  public String getNodePool(DatanodeDetails datanodeDetails) {
+    return nodeMemberShip.get(datanodeDetails);
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated
+   * with it. If the stream is already closed then invoking this
+   * method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the
+   * close may fail require careful attention. It is strongly advised
+   * to relinquish the underlying resources and to internally
+   * <em>mark</em> the {@code Closeable} as closed, prior to throwing
+   * the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+
+  }
+}
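One behavior worth calling out: membership is keyed by datanode, so re-adding a node to a second pool moves it rather than duplicating it. A small illustrative sketch (pool names are arbitrary):

    ReplicationNodePoolManagerMock pools = new ReplicationNodePoolManagerMock();
    DatanodeDetails dn = TestUtils.getDatanodeDetails();

    pools.addNode("PoolA", dn);
    pools.addNode("PoolB", dn);                  // overwrites the earlier mapping

    assert "PoolB".equals(pools.getNodePool(dn));
    assert pools.getNodes("PoolA").isEmpty();    // dn no longer belongs to PoolA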
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java
new file mode 100644
index 0000000..4e8a90b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/ozone/container/testutils/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.container.testutils;
+// Helper classes for ozone and container tests.
\ No newline at end of file
diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml
new file mode 100644
index 0000000..acc6711
--- /dev/null
+++ b/hadoop-hdds/tools/pom.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-hdds</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>hadoop-hdds-tools</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache HDDS Tools</description>
+  <name>Apache Hadoop HDDS tools</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.xerial</groupId>
+      <artifactId>sqlite-jdbc</artifactId>
+      <version>3.8.7</version>
+    </dependency>
+
+
+  </dependencies>
+</project>
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java
new file mode 100644
index 0000000..727c81a
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.util.Tool;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * This class is the base CLI for scm, ksm and scmadm.
+ */
+public abstract class OzoneBaseCLI extends Configured implements Tool {
+
+  protected abstract int dispatch(CommandLine cmd, Options opts)
+      throws IOException, URISyntaxException;
+
+  protected abstract CommandLine parseArgs(String[] argv, Options opts)
+      throws ParseException;
+
+  protected abstract Options getOptions();
+
+  protected abstract void displayHelp();
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java
new file mode 100644
index 0000000..641dd0e
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+
+import java.io.IOException;
+import java.io.PrintStream;
+
+/**
+ * The abstract class of all SCM CLI commands.
+ */
+public abstract class OzoneCommandHandler {
+
+  private ScmClient scmClient;
+  private PrintStream out = System.out;
+  private PrintStream err = System.err;
+
+  /**
+   * Constructs a handler object.
+   */
+  public OzoneCommandHandler(ScmClient scmClient) {
+    this.scmClient = scmClient;
+  }
+
+  protected ScmClient getScmClient() {
+    return scmClient;
+  }
+
+  /**
+   * Sets a customized output stream to redirect stdout somewhere else.
+   * @param out the stream that replaces System.out for this handler.
+   */
+  public void setOut(PrintStream out) {
+    this.out = out;
+  }
+
+  /**
+   * Sets a customized error stream to redirect stderr somewhere else.
+   * @param err the stream that replaces System.err for this handler.
+   */
+  public void setErr(PrintStream err) {
+    this.err = err;
+  }
+
+  public void logOut(String msg, String... variable) {
+    this.out.println(String.format(msg, variable));
+  }
+
+  /**
+   * Executes the Client command.
+   *
+   * @param cmd - CommandLine.
+   * @throws IOException throws exception.
+   */
+  public abstract void execute(CommandLine cmd) throws IOException;
+
+  /**
+   * Display a help message describing the options the command takes.
+   * TODO : currently only prints to standard out, may want to change this.
+   */
+  public abstract void displayHelp();
+
+  public PrintStream getOut() {
+    return out;
+  }
+
+  public PrintStream getErr() {
+    return err;
+  }
+}
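To make the contract concrete, a minimal hypothetical handler might look like the following (the command itself is invented for illustration and is not part of this patch; only the base-class methods used here are real):

    /** Hypothetical example handler; not wired into the SCM CLI. */
    public class PingHandler extends OzoneCommandHandler {

      public PingHandler(ScmClient scmClient) {
        super(scmClient);
      }

      @Override
      public void execute(CommandLine cmd) throws IOException {
        // A real handler would call into getScmClient() here.
        logOut("pong from %s", "scm");
      }

      @Override
      public void displayHelp() {
        getOut().println("usage: hdfs scm -ping");
      }
    }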
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java
new file mode 100644
index 0000000..27df88c
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli;
+
+/**
+ * The possible result codes of the SCM CLI.
+ */
+public final class ResultCode {
+  public static final int SUCCESS = 1;
+
+  public static final int UNRECOGNIZED_CMD = 2;
+
+  public static final int EXECUTION_ERROR = 3;
+
+  private ResultCode() {}
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java
new file mode 100644
index 0000000..8d71d00
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.cli.container.ContainerCommandHandler;
+import org.apache.hadoop.hdds.scm.cli.container.CreateContainerHandler;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ToolRunner;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.InetSocketAddress;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CONTAINER_SIZE_GB;
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
+import static org.apache.hadoop.hdds.scm.cli.ResultCode.EXECUTION_ERROR;
+import static org.apache.hadoop.hdds.scm.cli.ResultCode.SUCCESS;
+import static org.apache.hadoop.hdds.scm.cli.ResultCode.UNRECOGNIZED_CMD;
+
+/**
+ * This class is the CLI of SCM.
+ */
+public class SCMCLI extends OzoneBaseCLI {
+
+  public static final String HELP_OP = "help";
+  public static final int CMD_WIDTH = 80;
+
+  private final ScmClient scmClient;
+  private final PrintStream out;
+  private final PrintStream err;
+
+  private final Options options;
+
+  public SCMCLI(ScmClient scmClient) {
+    this(scmClient, System.out, System.err);
+  }
+
+  public SCMCLI(ScmClient scmClient, PrintStream out, PrintStream err) {
+    this.scmClient = scmClient;
+    this.out = out;
+    this.err = err;
+    this.options = getOptions();
+  }
+
+  /**
+   * Main for the scm shell Command handling.
+   *
+   * @param argv - System Args Strings[]
+   * @throws Exception
+   */
+  public static void main(String[] argv) throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    ScmClient scmClient = getScmClient(conf);
+    SCMCLI shell = new SCMCLI(scmClient);
+    conf.setQuietMode(false);
+    shell.setConf(conf);
+    int res = 0;
+    try {
+      res = ToolRunner.run(shell, argv);
+    } catch (Exception ex) {
+      System.exit(1);
+    }
+    System.exit(res);
+  }
+
+  private static ScmClient getScmClient(OzoneConfiguration ozoneConf)
+      throws IOException {
+    long version = RPC.getProtocolVersion(
+        StorageContainerLocationProtocolPB.class);
+    InetSocketAddress scmAddress =
+        getScmAddressForClients(ozoneConf);
+    int containerSizeGB = ozoneConf.getInt(OZONE_SCM_CONTAINER_SIZE_GB,
+        OZONE_SCM_CONTAINER_SIZE_DEFAULT);
+    ContainerOperationClient.setContainerSizeB(containerSizeGB*OzoneConsts.GB);
+
+    RPC.setProtocolEngine(ozoneConf, StorageContainerLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    StorageContainerLocationProtocolClientSideTranslatorPB client =
+        new StorageContainerLocationProtocolClientSideTranslatorPB(
+            RPC.getProxy(StorageContainerLocationProtocolPB.class, version,
+                scmAddress, UserGroupInformation.getCurrentUser(), ozoneConf,
+                NetUtils.getDefaultSocketFactory(ozoneConf),
+                Client.getRpcTimeout(ozoneConf)));
+    ScmClient storageClient = new ContainerOperationClient(
+        client, new XceiverClientManager(ozoneConf));
+    return storageClient;
+  }
+
+  /**
+   * Adds ALL the options that hdfs scm command supports. Given the hierarchy
+   * of commands, the options are added in a cascading manner, e.g.:
+   * {@link SCMCLI} asks {@link ContainerCommandHandler} to add its options,
+   * which then asks its sub commands, such as
+   * {@link CreateContainerHandler},
+   * to add their own options.
+   *
+   * We need to do this because {@link BasicParser} needs to see all the
+   * options when parsing args.
+   * @return ALL the options supported by this CLI.
+   */
+  @Override
+  protected Options getOptions() {
+    Options newOptions = new Options();
+    // add the options
+    addTopLevelOptions(newOptions);
+    ContainerCommandHandler.addOptions(newOptions);
+    // TODO : add pool, node and pipeline commands.
+    addHelpOption(newOptions);
+    return newOptions;
+  }
+
+  private static void addTopLevelOptions(Options options) {
+    Option containerOps =
+        new Option(ContainerCommandHandler.CONTAINER_CMD, false,
+            "Container related options");
+    options.addOption(containerOps);
+    // TODO : add pool, node and pipeline commands.
+  }
+
+  private static void addHelpOption(Options options) {
+    Option helpOp = new Option(HELP_OP, false, "display help message");
+    options.addOption(helpOp);
+  }
+
+  @Override
+  protected void displayHelp() {
+    HelpFormatter helpFormatter = new HelpFormatter();
+    Options topLevelOptions = new Options();
+    addTopLevelOptions(topLevelOptions);
+    helpFormatter.printHelp(CMD_WIDTH, "hdfs scmcli <commands> [<options>]",
+        "where <commands> can be one of the following",
+        topLevelOptions, "");
+  }
+
+  @Override
+  public int run(String[] args) throws Exception {
+    CommandLine cmd = parseArgs(args, options);
+    if (cmd == null) {
+      err.println("Unrecognized options:" + Arrays.asList(args));
+      displayHelp();
+      return UNRECOGNIZED_CMD;
+    }
+    return dispatch(cmd, options);
+  }
+
+  /**
+   * This function parses all command line arguments
+   * and returns the appropriate values.
+   *
+   * @param argv - Argv from main.
+   * @param opts - the full set of options to parse against.
+   *
+   * @return CommandLine, or null if the arguments could not be parsed.
+   */
+  @Override
+  protected CommandLine parseArgs(String[] argv, Options opts)
+      throws ParseException {
+    try {
+      BasicParser parser = new BasicParser();
+      return parser.parse(opts, argv);
+    } catch (ParseException ex) {
+      err.println(ex.getMessage());
+    }
+    return null;
+  }
+
+  @Override
+  protected int dispatch(CommandLine cmd, Options opts)
+      throws IOException, URISyntaxException {
+    OzoneCommandHandler handler = null;
+    try {
+      if (cmd.hasOption(ContainerCommandHandler.CONTAINER_CMD)) {
+        handler = new ContainerCommandHandler(scmClient);
+      }
+
+      if (handler == null) {
+        if (cmd.hasOption(HELP_OP)) {
+          displayHelp();
+          return SUCCESS;
+        } else {
+          displayHelp();
+          err.println("Unrecognized command: " + Arrays.asList(cmd.getArgs()));
+          return UNRECOGNIZED_CMD;
+        }
+      } else {
+        // Redirect stdout and stderr if necessary.
+        handler.setOut(this.out);
+        handler.setErr(this.err);
+        handler.execute(cmd);
+        return SUCCESS;
+      }
+    } catch (IOException ioe) {
+      err.println("Error executing command:" + ioe);
+      return EXECUTION_ERROR;
+    }
+  }
+}
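As a rough sketch of driving the CLI programmatically (assuming a Mockito mock for ScmClient, the same technique the patch's test utilities use), unrecognized or help-only input falls through to displayHelp():

    ScmClient client = Mockito.mock(ScmClient.class);
    SCMCLI cli = new SCMCLI(client, System.out, System.err);

    // "-help" is recognized by the cascading Options built in getOptions();
    // no sub-command handler matches, so the top-level help is printed.
    int rc = cli.run(new String[] {"-help"});
    assert rc == ResultCode.SUCCESS;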
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java
new file mode 100644
index 0000000..9a44525
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.cli.SCMCLI;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+
+import java.io.IOException;
+
+/**
+ * The handler of close container command.
+ */
+public class CloseContainerHandler extends OzoneCommandHandler {
+
+  public static final String CONTAINER_CLOSE = "close";
+  public static final String OPT_CONTAINER_NAME = "c";
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    if (!cmd.hasOption(CONTAINER_CLOSE)) {
+      throw new IOException("Expecting container close");
+    }
+    if (!cmd.hasOption(OPT_CONTAINER_NAME)) {
+      displayHelp();
+      if (!cmd.hasOption(SCMCLI.HELP_OP)) {
+        throw new IOException("Expecting container name");
+      } else {
+        return;
+      }
+    }
+    String containerName = cmd.getOptionValue(OPT_CONTAINER_NAME);
+
+    Pipeline pipeline = getScmClient().getContainer(containerName);
+    if (pipeline == null) {
+      throw new IOException("Cannot close an non-exist container "
+          + containerName);
+    }
+    logOut("Closing container : %s.", containerName);
+    getScmClient().closeContainer(pipeline);
+    logOut("Container closed.");
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addOptions(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter
+        .printHelp(SCMCLI.CMD_WIDTH, "hdfs scm -container -close <option>",
+            "where <option> is", options, "");
+  }
+
+  public static void addOptions(Options options) {
+    Option containerNameOpt = new Option(OPT_CONTAINER_NAME,
+        true, "Specify container name");
+    options.addOption(containerNameOpt);
+  }
+
+  CloseContainerHandler(ScmClient client) {
+    super(client);
+  }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java
new file mode 100644
index 0000000..980388f
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH;
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP;
+import static org.apache.hadoop.hdds.scm.cli.container.CloseContainerHandler
+    .CONTAINER_CLOSE;
+import static org.apache.hadoop.hdds.scm.cli.container.CreateContainerHandler
+    .CONTAINER_CREATE;
+import static org.apache.hadoop.hdds.scm.cli.container.DeleteContainerHandler
+    .CONTAINER_DELETE;
+import static org.apache.hadoop.hdds.scm.cli.container.InfoContainerHandler
+    .CONTAINER_INFO;
+import static org.apache.hadoop.hdds.scm.cli.container.ListContainerHandler
+    .CONTAINER_LIST;
+
+/**
+ * The handler class of container-specific commands, e.g. create, delete,
+ * info, list and close.
+ */
+public class ContainerCommandHandler extends OzoneCommandHandler {
+
+  public static final String CONTAINER_CMD = "container";
+
+  public ContainerCommandHandler(ScmClient scmClient) {
+    super(scmClient);
+  }
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    // all container commands should contain the -container option
+    if (!cmd.hasOption(CONTAINER_CMD)) {
+      throw new IOException("Expecting container cmd");
+    }
+    // check which sub-command it is
+    OzoneCommandHandler handler = null;
+    if (cmd.hasOption(CONTAINER_CREATE)) {
+      handler = new CreateContainerHandler(getScmClient());
+    } else if (cmd.hasOption(CONTAINER_DELETE)) {
+      handler = new DeleteContainerHandler(getScmClient());
+    } else if (cmd.hasOption(CONTAINER_INFO)) {
+      handler = new InfoContainerHandler(getScmClient());
+    } else if (cmd.hasOption(CONTAINER_LIST)) {
+      handler = new ListContainerHandler(getScmClient());
+    } else if (cmd.hasOption(CONTAINER_CLOSE)) {
+      handler = new CloseContainerHandler(getScmClient());
+    }
+
+    // execute the sub-command; throw an exception if no sub-command is found,
+    // unless the -help option is given.
+    if (handler != null) {
+      handler.setOut(this.getOut());
+      handler.setErr(this.getErr());
+      handler.execute(cmd);
+    } else {
+      displayHelp();
+      if (!cmd.hasOption(HELP_OP)) {
+        throw new IOException("Unrecognized command "
+            + Arrays.asList(cmd.getArgs()));
+      }
+    }
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addCommandsOption(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(CMD_WIDTH,
+        "hdfs scm -container <commands> <options>",
+        "where <commands> can be one of the following", options, "");
+  }
+
+  private static void addCommandsOption(Options options) {
+    Option createContainer =
+        new Option(CONTAINER_CREATE, false, "Create container");
+    Option infoContainer =
+        new Option(CONTAINER_INFO, false, "Info container");
+    Option deleteContainer =
+        new Option(CONTAINER_DELETE, false, "Delete container");
+    Option listContainer =
+        new Option(CONTAINER_LIST, false, "List container");
+    Option closeContainer =
+        new Option(CONTAINER_CLOSE, false, "Close container");
+
+    options.addOption(createContainer);
+    options.addOption(deleteContainer);
+    options.addOption(infoContainer);
+    options.addOption(listContainer);
+    options.addOption(closeContainer);
+    // Every new sub-command should add its option here.
+  }
+
+  public static void addOptions(Options options) {
+    addCommandsOption(options);
+    // add the options of each sub-command handler.
+    CreateContainerHandler.addOptions(options);
+    DeleteContainerHandler.addOptions(options);
+    InfoContainerHandler.addOptions(options);
+    ListContainerHandler.addOptions(options);
+    CloseContainerHandler.addOptions(options);
+    // Every new handler should add its options here.
+  }
+}
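
As a reading aid only (not part of this patch), the sketch below shows how the dispatch in `ContainerCommandHandler` could be exercised from a driver: the top-level `-container` flag plus the per-command options are assembled via `addOptions`, the arguments are parsed with Commons CLI, and `execute` routes to the matching sub-command handler. The wrapper class name, the `GnuParser` choice, and the example arguments are assumptions; only the handler API shown above comes from the patch.

```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.hadoop.hdds.scm.cli.container.ContainerCommandHandler;
import org.apache.hadoop.hdds.scm.client.ScmClient;

public final class ContainerCliSketch {

  public static void run(ScmClient scmClient, String[] args) throws Exception {
    Options options = new Options();
    // execute() checks for the top-level -container flag, so register it too.
    options.addOption(ContainerCommandHandler.CONTAINER_CMD, false,
        "Container related commands");
    // Pull in the options of every sub-command handler (create, delete, ...).
    ContainerCommandHandler.addOptions(options);

    // e.g. args = {"-container", "-create", "-c", "container-1"}
    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(options, args);

    // Routes to CreateContainerHandler for the example arguments above; in
    // SCMCLI the handler's output streams are configured (setOut/setErr)
    // before execute() is called.
    new ContainerCommandHandler(scmClient).execute(cmd);
  }

  private ContainerCliSketch() {
  }
}
```
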
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java
new file mode 100644
index 0000000..2961831
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+
+import java.io.IOException;
+
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH;
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP;
+
+/**
+ * This is the handler that processes the container creation command.
+ */
+public class CreateContainerHandler extends OzoneCommandHandler {
+
+  public static final String CONTAINER_CREATE = "create";
+  public static final String OPT_CONTAINER_NAME = "c";
+  public static final String CONTAINER_OWNER = "OZONE";
+  // TODO Support an optional -p <pipelineID> option to create
+  // container on given datanodes.
+
+  public CreateContainerHandler(ScmClient scmClient) {
+    super(scmClient);
+  }
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    if (!cmd.hasOption(CONTAINER_CREATE)) {
+      throw new IOException("Expecting container create");
+    }
+    if (!cmd.hasOption(OPT_CONTAINER_NAME)) {
+      displayHelp();
+      if (!cmd.hasOption(HELP_OP)) {
+        throw new IOException("Expecting container name");
+      } else {
+        return;
+      }
+    }
+    String containerName = cmd.getOptionValue(OPT_CONTAINER_NAME);
+
+    logOut("Creating container : %s.", containerName);
+    getScmClient().createContainer(containerName, CONTAINER_OWNER);
+    logOut("Container created.");
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addOptions(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -create <option>",
+        "where <option> is", options, "");
+  }
+
+  public static void addOptions(Options options) {
+    Option containerNameOpt = new Option(OPT_CONTAINER_NAME,
+        true, "Specify container name");
+    options.addOption(containerNameOpt);
+  }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java
new file mode 100644
index 0000000..a5b625a
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+
+import java.io.IOException;
+
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH;
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP;
+
+/**
+ * This is the handler that processes the container delete command.
+ */
+public class DeleteContainerHandler extends OzoneCommandHandler {
+
+  protected static final String CONTAINER_DELETE = "delete";
+  protected static final String OPT_FORCE = "f";
+  protected static final String OPT_CONTAINER_NAME = "c";
+
+  public DeleteContainerHandler(ScmClient scmClient) {
+    super(scmClient);
+  }
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    Preconditions.checkArgument(cmd.hasOption(CONTAINER_DELETE),
+        "Expecting command delete");
+    if (!cmd.hasOption(OPT_CONTAINER_NAME)) {
+      displayHelp();
+      if (!cmd.hasOption(HELP_OP)) {
+        throw new IOException("Expecting container name");
+      } else {
+        return;
+      }
+    }
+
+    String containerName = cmd.getOptionValue(OPT_CONTAINER_NAME);
+
+    Pipeline pipeline = getScmClient().getContainer(containerName);
+    if (pipeline == null) {
+      throw new IOException("Cannot delete an non-exist container "
+          + containerName);
+    }
+
+    logOut("Deleting container : %s.", containerName);
+    getScmClient().deleteContainer(pipeline, cmd.hasOption(OPT_FORCE));
+    logOut("Container %s deleted.", containerName);
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addOptions(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -delete <option>",
+        "where <option> is", options, "");
+  }
+
+  public static void addOptions(Options options) {
+    Option forceOpt = new Option(OPT_FORCE,
+        false,
+        "forcibly delete a container");
+    options.addOption(forceOpt);
+    Option containerNameOpt = new Option(OPT_CONTAINER_NAME,
+        true, "Specify container name");
+    options.addOption(containerNameOpt);
+  }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java
new file mode 100644
index 0000000..c609915
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.io.IOException;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH;
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP;
+
+/**
+ * This is the handler that processes the container info command.
+ */
+public class InfoContainerHandler extends OzoneCommandHandler {
+
+  public static final String CONTAINER_INFO = "info";
+  protected static final String OPT_CONTAINER_NAME = "c";
+
+  /**
+   * Constructs a handler object.
+   *
+   * @param scmClient scm client.
+   */
+  public InfoContainerHandler(ScmClient scmClient) {
+    super(scmClient);
+  }
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    if (!cmd.hasOption(CONTAINER_INFO)) {
+      throw new IOException("Expecting container info");
+    }
+    if (!cmd.hasOption(OPT_CONTAINER_NAME)) {
+      displayHelp();
+      if (!cmd.hasOption(HELP_OP)) {
+        throw new IOException("Expecting container name");
+      } else {
+        return;
+      }
+    }
+    String containerName = cmd.getOptionValue(OPT_CONTAINER_NAME);
+    Pipeline pipeline = getScmClient().getContainer(containerName);
+    Preconditions.checkNotNull(pipeline, "Pipeline cannot be null");
+
+    ContainerData containerData =
+        getScmClient().readContainer(pipeline);
+
+    // Print container report info.
+    logOut("Container Name: %s",
+        containerData.getName());
+    String openStatus =
+        containerData.getState() == HddsProtos.LifeCycleState.OPEN ? "OPEN" :
+            "CLOSED";
+    logOut("Container State: %s", openStatus);
+    if (!containerData.getHash().isEmpty()) {
+      logOut("Container Hash: %s", containerData.getHash());
+    }
+    logOut("Container DB Path: %s", containerData.getDbPath());
+    logOut("Container Path: %s", containerData.getContainerPath());
+
+    // Output meta data.
+    String metadataStr = containerData.getMetadataList().stream().map(
+        p -> p.getKey() + ":" + p.getValue()).collect(Collectors.joining(", "));
+    logOut("Container Metadata: {%s}", metadataStr);
+
+    // Print pipeline of an existing container.
+    logOut("LeaderID: %s", pipeline.getLeader().getHostName());
+    String machinesStr = pipeline.getMachines().stream().map(
+        DatanodeDetails::getHostName).collect(Collectors.joining(","));
+    logOut("Datanodes: [%s]", machinesStr);
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addOptions(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -info <option>",
+        "where <option> is", options, "");
+  }
+
+  public static void addOptions(Options options) {
+    Option containerNameOpt = new Option(OPT_CONTAINER_NAME,
+        true, "Specify container name");
+    options.addOption(containerNameOpt);
+  }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java
new file mode 100644
index 0000000..0c7e790
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.container;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH;
+import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP;
+
+/**
+ * This is the handler that processes the container list command.
+ */
+public class ListContainerHandler extends OzoneCommandHandler {
+
+  public static final String CONTAINER_LIST = "list";
+  public static final String OPT_START_CONTAINER = "start";
+  public static final String OPT_PREFIX_CONTAINER = "prefix";
+  public static final String OPT_COUNT = "count";
+
+  /**
+   * Constructs a handler object.
+   *
+   * @param scmClient scm client
+   */
+  public ListContainerHandler(ScmClient scmClient) {
+    super(scmClient);
+  }
+
+  @Override
+  public void execute(CommandLine cmd) throws IOException {
+    if (!cmd.hasOption(CONTAINER_LIST)) {
+      throw new IOException("Expecting container list");
+    }
+    if (cmd.hasOption(HELP_OP)) {
+      displayHelp();
+      return;
+    }
+
+    if (!cmd.hasOption(OPT_COUNT)) {
+      displayHelp();
+      if (!cmd.hasOption(HELP_OP)) {
+        throw new IOException("Expecting container count");
+      } else {
+        return;
+      }
+    }
+
+    String startName = cmd.getOptionValue(OPT_START_CONTAINER);
+    String prefixName = cmd.getOptionValue(OPT_PREFIX_CONTAINER);
+    int count = 0;
+
+    if (cmd.hasOption(OPT_COUNT)) {
+      count = Integer.parseInt(cmd.getOptionValue(OPT_COUNT));
+      if (count < 0) {
+        displayHelp();
+        throw new IOException("-count should not be negative");
+      }
+    }
+
+    List<ContainerInfo> containerList =
+        getScmClient().listContainer(startName, prefixName, count);
+
+    // Output data list
+    for (ContainerInfo container : containerList) {
+      outputContainerPipeline(container.getPipeline());
+    }
+  }
+
+  private void outputContainerPipeline(Pipeline pipeline) throws IOException {
+    // Print container report info.
+    logOut("%s", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        pipeline.toJsonString()));
+  }
+
+  @Override
+  public void displayHelp() {
+    Options options = new Options();
+    addOptions(options);
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -list <option>",
+        "where <option> can be the following", options, "");
+  }
+
+  public static void addOptions(Options options) {
+    Option startContainerOpt = new Option(OPT_START_CONTAINER,
+        true, "Specify start container name");
+    Option endContainerOpt = new Option(OPT_PREFIX_CONTAINER,
+        true, "Specify prefix container name");
+    Option countOpt = new Option(OPT_COUNT, true,
+        "Specify count number, required");
+    options.addOption(countOpt);
+    options.addOption(startContainerOpt);
+    options.addOption(endContainerOpt);
+  }
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java
new file mode 100644
index 0000000..0630df2
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.cli.container;
\ No newline at end of file
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java
new file mode 100644
index 0000000..4762d55
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.cli;
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
index e474ab5..2856265 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/datanode.html
@@ -95,6 +95,52 @@
   {/dn.BPServiceActorInfo}
 </table>
 
+{#ozone.enabled}
+<div class="page-header"><h1>Ozone: SCM Connections</h1></div>
+<table class="table">
+  <thead>
+  <tr>
+    <th>SCM Address</th>
+    <th>Status</th>
+    <th>Version</th>
+    <th>Missed count</th>
+    <th>Last heartbeat</th>
+  </tr>
+  </thead>
+  {#ozone.SCMServers}
+  <tr>
+    <td>{addressString}</td>
+    <td>{state}</td>
+    <td>{versionNumber}</td>
+    <td>{missedCount}s</td>
+    <td>{lastSuccessfulHeartbeat|elapsed|fmt_time}</td>
+  </tr>
+  {/ozone.SCMServers}
+</table>
+
+<div class="page-header"><h1>Ozone: Storage locations</h1></div>
+<table class="table">
+  <thead>
+  <tr>
+    <th>ID</th>
+    <th>Capacity</th>
+    <th>Remaining</th>
+    <th>SCM used</th>
+    <th>Failed</th>
+  </tr>
+  </thead>
+  {#ozone.LocationReport}
+  <tr>
+    <td>{id}</td>
+    <td>{capacity|fmt_bytes}</td>
+    <td>{remaining|fmt_bytes}</td>
+    <td>{scmUsed|fmt_bytes}</td>
+    <td>{failed}</td>
+  </tr>
+  {/ozone.LocationReport}
+</table>
+{/ozone.enabled}
+
 <div class="page-header"><h1>Volume Information</h1></div>
 <table class="table">
   <thead>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
index 7378846..2d1c98b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
@@ -47,3 +47,27 @@
 log4j.appender.DNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n
 log4j.appender.DNMETRICSRFA.MaxBackupIndex=1
 log4j.appender.DNMETRICSRFA.MaxFileSize=64MB
+
+#
+# Add a logger for ozone that is separate from the Datanode.
+#
+log4j.logger.org.apache.hadoop.ozone=INFO,OZONE,FILE
+
+# Do not log into the datanode logs. Remove this line to have a single log.
+log4j.additivity.org.apache.hadoop.ozone=false
+
+# For development purposes, log both to console and log file.
+log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
+log4j.appender.OZONE.Threshold=ALL
+log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
+log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) \
+ %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
+
+# Real ozone logger that writes to ozone.log
+log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
+log4j.appender.FILE.Threshold=debug
+log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+  (%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
+  %m%n
diff --git a/hadoop-minicluster/pom.xml b/hadoop-minicluster/pom.xml
index 636ee35..5a35365 100644
--- a/hadoop-minicluster/pom.xml
+++ b/hadoop-minicluster/pom.xml
@@ -67,6 +67,12 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>io.kubernetes</groupId>
+          <artifactId>client-java</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <dependency>
diff --git a/hadoop-ozone/acceptance-test/README.md b/hadoop-ozone/acceptance-test/README.md
new file mode 100644
index 0000000..07d10fb
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/README.md
@@ -0,0 +1,38 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+# Acceptance test suite for Ozone/Hdsl
+
+This project contains acceptance tests for ozone/hdds using docker-compose and the [Robot Framework](http://robotframework.org/).
+
+## Run
+
+To run the acceptance tests, please activate the `ozone-acceptance-test` profile and do a full build.
+
+Typically you need `mvn install -Phdsl,ozone-acceptance-test,dist -DskipTests` for a build without unit tests but with the acceptance tests.
+
+Notes:
+
+ 1. You need a hadoop build in the hadoop-dist/target directory.
+ 2. The `ozone-acceptance-test` profile can be activated even if the unit tests are disabled.
+
+
+## Development
+
+You can run the robot tests manually with the `robot` CLI. (See the Robot Framework docs for installation instructions.)
+
+ 1. Go to the `src/test/robotframework`
+ 2. Execute `robot -v basedir:${PWD}/../../.. -v VERSION:3.2.0-SNAPSHOT .`
+
+You can also select just one test with `-t "*testnamefragment*"`.
\ No newline at end of file
diff --git a/hadoop-ozone/acceptance-test/pom.xml b/hadoop-ozone/acceptance-test/pom.xml
new file mode 100644
index 0000000..b4b24d5
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/pom.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>3.2.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project</relativePath>
+  </parent>
+  <artifactId>hadoop-ozone-acceptance-test</artifactId>
+  <version>3.2.0-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Acceptance test</description>
+  <name>Apache Hadoop Ozone acceptance test</name>
+  <packaging>pom</packaging>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-docker-compose</id>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <phase>process-test-resources</phase>
+            <configuration>
+              <outputDirectory>${project.build.directory}/compose
+              </outputDirectory>
+              <resources>
+                <resource>
+                  <directory>src/test/compose</directory>
+                  <filtering>true</filtering>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <profiles>
+    <profile>
+      <id>ozone-acceptance-test</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.robotframework</groupId>
+            <artifactId>robotframework-maven-plugin</artifactId>
+            <version>1.4.7</version>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <variables>
+                    <variable>version:${project.version}</variable>
+                    <variable>basedir:${project.basedir}</variable>
+                  </variables>
+                  <skip>false</skip>
+                  <skipTests>false</skipTests>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
diff --git a/hadoop-ozone/acceptance-test/src/test/compose/.env b/hadoop-ozone/acceptance-test/src/test/compose/.env
new file mode 100644
index 0000000..79f890b
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/src/test/compose/.env
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+HADOOPDIR=../../hadoop-dist/target/hadoop-${project.version}
\ No newline at end of file
diff --git a/hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml b/hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml
new file mode 100644
index 0000000..8350eae
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/src/test/compose/docker-compose.yaml
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: "3"
+services:
+   namenode:
+      image: apache/hadoop-runner
+      hostname: namenode
+      volumes:
+         - ${HADOOPDIR}:/opt/hadoop
+      ports:
+         - 9870
+      environment:
+          ENSURE_NAMENODE_DIR: /data/namenode
+      env_file:
+         - ./docker-config
+      command: ["/opt/hadoop/bin/hdfs","namenode"]
+   datanode:
+      image: apache/hadoop-runner
+      volumes:
+        - ${HADOOPDIR}:/opt/hadoop
+      ports:
+        - 9864
+      command: ["/opt/hadoop/bin/ozone","datanode"]
+      env_file:
+        - ./docker-config
+   ksm:
+      image: apache/hadoop-runner
+      volumes:
+         - ${HADOOPDIR}:/opt/hadoop
+      ports:
+         - 9874
+      environment:
+         ENSURE_KSM_INITIALIZED: /data/metadata/ksm/current/VERSION
+      env_file:
+          - ./docker-config
+      command: ["/opt/hadoop/bin/ozone","ksm"]
+   scm:
+      image: apache/hadoop-runner
+      volumes:
+         - ${HADOOPDIR}:/opt/hadoop
+      ports:
+         - 9876
+      env_file:
+          - ./docker-config
+      environment:
+          ENSURE_SCM_INITIALIZED: /data/metadata/scm/current/VERSION
+      command: ["/opt/hadoop/bin/ozone","scm"]
diff --git a/hadoop-ozone/acceptance-test/src/test/compose/docker-config b/hadoop-ozone/acceptance-test/src/test/compose/docker-config
new file mode 100644
index 0000000..c693db0
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/src/test/compose/docker-config
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CORE-SITE.XML_fs.defaultFS=hdfs://namenode:9000
+OZONE-SITE.XML_ozone.ksm.address=ksm
+OZONE-SITE.XML_ozone.scm.names=scm
+OZONE-SITE.XML_ozone.enabled=True
+OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
+OZONE-SITE.XML_ozone.scm.block.client.address=scm
+OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
+OZONE-SITE.XML_ozone.handler.type=distributed
+OZONE-SITE.XML_ozone.scm.client.address=scm
+OZONE-SITE.XML_hdds.datanode.plugins=org.apache.hadoop.ozone.web.OzoneHddsDatanodeService
+HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
+HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
+HDFS-SITE.XML_rpc.metrics.quantile.enable=true
+HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.HddsDatanodeService
+LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
+LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
diff --git a/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone.robot b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone.robot
new file mode 100644
index 0000000..ea9131e
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone.robot
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       Smoke test to start cluster with docker-compose environments.
+Library             OperatingSystem
+Suite Setup         Startup Ozone Cluster
+Suite Teardown      Teardown Ozone Cluster
+
+*** Variables ***
+${COMMON_REST_HEADER}   -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H  "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE root"
+${version}
+
+*** Test Cases ***
+
+Daemons are running without error
+    Is daemon running without error           ksm
+    Is daemon running without error           scm
+    Is daemon running without error           namenode
+    Is daemon running without error           datanode
+
+Check if datanode is connected to the scm
+    Wait Until Keyword Succeeds     3min    5sec    Have healthy datanodes   1
+
+Scale it up to 5 datanodes
+    Scale datanodes up  5
+    Wait Until Keyword Succeeds     3min    5sec    Have healthy datanodes   5
+
+Test rest interface
+    ${result} =     Execute on          datanode        curl -i -X POST ${COMMON_RESTHEADER} "http://localhost:9880/volume1"
+                    Should contain      ${result}       201 Created
+    ${result} =     Execute on          datanode        curl -i -X POST ${COMMON_RESTHEADER} "http://localhost:9880/volume1/bucket1"
+                    Should contain      ${result}       201 Created
+    ${result} =     Execute on          datanode        curl -i -X DELETE ${COMMON_RESTHEADER} "http://localhost:9880/volume1/bucket1"
+                    Should contain      ${result}       200 OK
+    ${result} =     Execute on          datanode        curl -i -X DELETE ${COMMON_RESTHEADER} "http://localhost:9880/volume1"
+                    Should contain      ${result}       200 OK
+
+Test ozone cli
+                    Execute on          datanode        ozone oz -createVolume http://localhost:9880/hive -user bilbo -quota 100TB -root
+    ${result} =     Execute on          datanode        ozone oz -listVolume http://localhost:9880/ -user bilbo | grep -v Removed | jq '.[] | select(.volumeName=="hive")'
+                    Should contain      ${result}       createdOn
+                    Execute on          datanode        ozone oz -createBucket http://localhost:9880/hive/bb1
+    ${result}       Execute on          datanode        ozone oz -listBucket http://localhost:9880/hive/ | grep -v Removed | jq -r '.[] | select(.bucketName=="bb1") | .volumeName'
+                    Should Be Equal     ${result}       hive
+                    Execute on          datanode        ozone oz -deleteBucket http://localhost:9880/hive/bb1
+                    Execute on          datanode        ozone oz -deleteVolume http://localhost:9880/hive -user bilbo
+
+
+
+Check webui static resources
+    ${result} =			Execute on		scm		curl -s -I http://localhost:9876/static/bootstrap-3.0.2/js/bootstrap.min.js
+	 Should contain		${result}		200
+    ${result} =			Execute on		ksm		curl -s -I http://localhost:9874/static/bootstrap-3.0.2/js/bootstrap.min.js
+	 Should contain		${result}		200
+
+Start freon testing
+    ${result} =		Execute on		ksm		ozone freon -numOfVolumes 5 -numOfBuckets 5 -numOfKeys 5 -numOfThreads 10
+	 Wait Until Keyword Succeeds	3min	10sec		Should contain		${result}		Number of Keys added: 125
+	 Should Not Contain		${result}		ERROR
+
+*** Keywords ***
+
+Startup Ozone Cluster
+    ${rc}       ${output} =                 Run docker compose          down
+    ${rc}       ${output} =                 Run docker compose          up -d
+    Should Be Equal As Integers 	        ${rc} 	                    0
+    Wait Until Keyword Succeeds             1min    5sec    Is Daemon started   ksm     HTTP server of KSM is listening
+
+Teardown Ozone Cluster
+    Run docker compose      down
+    
+Is daemon running without error
+    [arguments]             ${name}
+    ${result} =             Run                     docker ps
+    Should contain          ${result}               _${name}_1
+    ${rc}                   ${result} =             Run docker compose      logs ${name}
+    Should not contain      ${result}               ERROR
+
+Is Daemon started
+    [arguments]     ${name}             ${expression}
+    ${rc}           ${result} =         Run docker compose      logs
+    Should contain  ${result}           ${expression}
+
+Have healthy datanodes
+    [arguments]         ${requirednodes}
+    ${result} =         Execute on          scm                 curl -s 'http://localhost:9876/jmx?qry=Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo' | jq -r '.beans[0].NodeCount[] | select(.key=="HEALTHY") | .value'
+    Should Be Equal     ${result}           ${requirednodes}
+
+Scale datanodes up
+    [arguments]              ${requirednodes}
+    Run docker compose       scale datanode=${requirednodes}
+
+Execute on
+    [arguments]     ${componentname}    ${command}
+    ${rc}           ${return} =         Run docker compose          exec ${componentname} ${command}
+    [return]        ${return}
+
+Run docker compose
+    [arguments]                     ${command}
+                                    Set Environment Variable    HADOOPDIR                              ${basedir}/../../hadoop-dist/target/hadoop-${version}
+    ${rc}                           ${output} =                 Run And Return Rc And Output           docker-compose -f ${basedir}/target/compose/docker-compose.yaml ${command}
+    Should Be Equal As Integers     ${rc}                       0
+    [return]                            ${rc}                       ${output}
diff --git a/hadoop-ozone/client/pom.xml b/hadoop-ozone/client/pom.xml
new file mode 100644
index 0000000..fae630f
--- /dev/null
+++ b/hadoop-ozone/client/pom.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-client</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Client</description>
+  <name>Apache Hadoop Ozone Client</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>ozone</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+</project>
\ No newline at end of file
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/BucketArgs.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/BucketArgs.java
new file mode 100644
index 0000000..39b7bb8
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/BucketArgs.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneAcl;
+
+import java.util.List;
+
+/**
+ * This class encapsulates the arguments that are
+ * required for creating a bucket.
+ */
+public final class BucketArgs {
+
+  /**
+   * ACL Information.
+   */
+  private List<OzoneAcl> acls;
+  /**
+   * Bucket Version flag.
+   */
+  private Boolean versioning;
+  /**
+   * Type of storage to be used for this bucket.
+   * [RAM_DISK, SSD, DISK, ARCHIVE]
+   */
+  private StorageType storageType;
+
+  /**
+   * Private constructor, constructed via builder.
+   * @param versioning Bucket version flag.
+   * @param storageType Storage type to be used.
+   * @param acls list of ACLs.
+   */
+  private BucketArgs(Boolean versioning, StorageType storageType,
+                     List<OzoneAcl> acls) {
+    this.acls = acls;
+    this.versioning = versioning;
+    this.storageType = storageType;
+  }
+
+  /**
+   * Returns true if bucket versioning is enabled, else false.
+   * @return isVersionEnabled
+   */
+  public Boolean getVersioning() {
+    return versioning;
+  }
+
+  /**
+   * Returns the type of storage to be used.
+   * @return StorageType
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Returns the ACLs associated with this bucket.
+   * @return List<OzoneAcl>
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Returns a new builder that builds a BucketArgs.
+   *
+   * @return Builder
+   */
+  public static BucketArgs.Builder newBuilder() {
+    return new BucketArgs.Builder();
+  }
+
+  /**
+   * Builder for BucketArgs.
+   */
+  public static class Builder {
+    private Boolean versioning;
+    private StorageType storageType;
+    private List<OzoneAcl> acls;
+
+    public BucketArgs.Builder setVersioning(Boolean versionFlag) {
+      this.versioning = versionFlag;
+      return this;
+    }
+
+    public BucketArgs.Builder setStorageType(StorageType storage) {
+      this.storageType = storage;
+      return this;
+    }
+
+    public BucketArgs.Builder setAcls(List<OzoneAcl> listOfAcls) {
+      this.acls = listOfAcls;
+      return this;
+    }
+
+    /**
+     * Constructs the BucketArgs.
+     * @return instance of BucketArgs.
+     */
+    public BucketArgs build() {
+      return new BucketArgs(versioning, storageType, acls);
+    }
+  }
+}
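
A minimal sketch (not in the patch) of how the builder above is meant to be used; the resulting `BucketArgs` would typically be handed to a bucket-creation call on the client. The wrapper class name and the chosen values are illustrative assumptions.

```java
import java.util.Collections;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.client.BucketArgs;

public final class BucketArgsSketch {

  public static BucketArgs versionedSsdBucket() {
    return BucketArgs.newBuilder()
        .setVersioning(true)                 // enable bucket versioning
        .setStorageType(StorageType.SSD)     // RAM_DISK, SSD, DISK or ARCHIVE
        .setAcls(Collections.emptyList())    // no extra ACLs for this example
        .build();
  }

  private BucketArgsSketch() {
  }
}
```
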
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/ObjectStore.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/ObjectStore.java
new file mode 100644
index 0000000..b915213
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/ObjectStore.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * ObjectStore is responsible for the client operations that can be
+ * performed on the Ozone Object Store.
+ */
+public class ObjectStore {
+
+  /**
+   * The proxy used for connecting to the cluster and performing
+   * client operations.
+   */
+  private final ClientProtocol proxy;
+
+  /**
+   * Cache size to be used for listVolume calls.
+   */
+  private int listCacheSize;
+
+  /**
+   * Creates an instance of ObjectStore.
+   * @param conf Configuration object.
+   * @param proxy ClientProtocol proxy.
+   */
+  public ObjectStore(Configuration conf, ClientProtocol proxy) {
+    this.proxy = proxy;
+    this.listCacheSize = HddsClientUtils.getListCacheSize(conf);
+  }
+
+  /**
+   * Creates the volume with default values.
+   * @param volumeName Name of the volume to be created.
+   * @throws IOException
+   */
+  public void createVolume(String volumeName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    HddsClientUtils.verifyResourceName(volumeName);
+    proxy.createVolume(volumeName);
+  }
+
+  /**
+   * Creates the volume.
+   * @param volumeName Name of the volume to be created.
+   * @param volumeArgs Volume properties.
+   * @throws IOException
+   */
+  public void createVolume(String volumeName, VolumeArgs volumeArgs)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(volumeArgs);
+    HddsClientUtils.verifyResourceName(volumeName);
+    proxy.createVolume(volumeName, volumeArgs);
+  }
+
+  /**
+   * Returns the volume information.
+   * @param volumeName Name of the volume.
+   * @return OzoneVolume
+   * @throws IOException
+   */
+  public OzoneVolume getVolume(String volumeName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    HddsClientUtils.verifyResourceName(volumeName);
+    OzoneVolume volume = proxy.getVolumeDetails(volumeName);
+    return volume;
+  }
+
+
+  /**
+   * Returns an Iterator to iterate over all the volumes in the object store.
+   * The result can be restricted using a volume prefix; all volumes are
+   * returned if the prefix is null.
+   *
+   * @param volumePrefix Volume prefix to match
+   * @return {@code Iterator<OzoneVolume>}
+   */
+  public Iterator<OzoneVolume> listVolumes(String volumePrefix)
+      throws IOException {
+    return new VolumeIterator(volumePrefix);
+  }
+
+  /**
+   * Returns an Iterator to iterate over the volumes owned by a specific
+   * user. The result can be restricted using a volume prefix; all volumes
+   * are returned if the prefix is null. If the user is null, the volumes of
+   * the current user are returned.
+   *
+   * @param user User Name
+   * @param volumePrefix Volume prefix to match
+   * @return {@code Iterator<OzoneVolume>}
+   */
+  public Iterator<OzoneVolume> listVolumes(String user, String volumePrefix)
+      throws IOException {
+    if(Strings.isNullOrEmpty(user)) {
+      user = UserGroupInformation.getCurrentUser().getShortUserName();
+    }
+    return new VolumeIterator(user, volumePrefix);
+  }
+
+  /**
+   * Deletes the volume.
+   * @param volumeName Name of the volume.
+   * @throws IOException
+   */
+  public void deleteVolume(String volumeName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    HddsClientUtils.verifyResourceName(volumeName);
+    proxy.deleteVolume(volumeName);
+  }
+
+  /**
+   * An Iterator over the {@link OzoneVolume} list.
+   */
+  private class VolumeIterator implements Iterator<OzoneVolume> {
+
+    private String user = null;
+    private String volPrefix = null;
+
+    private Iterator<OzoneVolume> currentIterator;
+    private OzoneVolume currentValue;
+
+    /**
+     * Creates an Iterator to iterate over all volumes in the cluster
+     * that match the volume prefix.
+     * @param volPrefix prefix to match
+     */
+    VolumeIterator(String volPrefix) {
+      this(null, volPrefix);
+    }
+
+    /**
+     * Creates an Iterator to iterate over all volumes of the user
+     * that match the volume prefix.
+     * @param user user name
+     * @param volPrefix volume prefix to match
+     */
+    VolumeIterator(String user, String volPrefix) {
+      this.user = user;
+      this.volPrefix = volPrefix;
+      this.currentValue = null;
+      this.currentIterator = getNextListOfVolumes(null).iterator();
+    }
+
+    @Override
+    public boolean hasNext() {
+      if(!currentIterator.hasNext()) {
+        currentIterator = getNextListOfVolumes(
+            currentValue != null ? currentValue.getName() : null)
+            .iterator();
+      }
+      return currentIterator.hasNext();
+    }
+
+    @Override
+    public OzoneVolume next() {
+      if(hasNext()) {
+        currentValue = currentIterator.next();
+        return currentValue;
+      }
+      throw new NoSuchElementException();
+    }
+
+    /**
+     * Returns the next batch of volumes using the proxy.
+     * @param prevVolume previous volume; it will be excluded from the result
+     * @return {@code List<OzoneVolume>}
+     */
+    private List<OzoneVolume> getNextListOfVolumes(String prevVolume) {
+      try {
+        //if user is null, we do list of all volumes.
+        if(user != null) {
+          return proxy.listVolumes(user, volPrefix, prevVolume, listCacheSize);
+        }
+        return proxy.listVolumes(volPrefix, prevVolume, listCacheSize);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+}
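
For orientation only (not part of the patch), a hedged sketch of typical `ObjectStore` calls using only the methods defined above; how the `ObjectStore` instance itself is obtained (for example from an Ozone client factory) is an assumption and not shown.

```java
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneVolume;

public final class ObjectStoreSketch {

  public static void createListDelete(ObjectStore store) throws IOException {
    store.createVolume("vol-example");   // create a volume with default values

    // Iterate over all volumes whose name starts with "vol-"; the iterator
    // pages through the results internally using the configured cache size.
    Iterator<OzoneVolume> volumes = store.listVolumes("vol-");
    while (volumes.hasNext()) {
      System.out.println(volumes.next().getName());
    }

    store.deleteVolume("vol-example");
  }

  private ObjectStoreSketch() {
  }
}
```
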
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneBucket.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneBucket.java
new file mode 100644
index 0000000..ba6286b
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneBucket.java
@@ -0,0 +1,363 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.ozone.OzoneAcl;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * A class that encapsulates OzoneBucket.
+ */
+public class OzoneBucket {
+
+  /**
+   * The proxy used for connecting to the cluster and performing
+   * client operations.
+   */
+  private final ClientProtocol proxy;
+  /**
+   * Name of the volume the bucket belongs to.
+   */
+  private final String volumeName;
+  /**
+   * Name of the bucket.
+   */
+  private final String name;
+  /**
+   * Default replication factor to be used while creating keys.
+   */
+  private final ReplicationFactor defaultReplication;
+
+  /**
+   * Default replication type to be used while creating keys.
+   */
+  private final ReplicationType defaultReplicationType;
+  /**
+   * Bucket ACLs.
+   */
+  private List<OzoneAcl> acls;
+
+  /**
+   * Type of storage to be used for this bucket.
+   * [RAM_DISK, SSD, DISK, ARCHIVE]
+   */
+  private StorageType storageType;
+
+  /**
+   * Bucket Version flag.
+   */
+  private Boolean versioning;
+
+  /**
+   * Cache size to be used for listKey calls.
+   */
+  private int listCacheSize;
+
+  /**
+   * Creation time of the bucket.
+   */
+  private long creationTime;
+
+  /**
+   * Constructs OzoneBucket instance.
+   * @param conf Configuration object.
+   * @param proxy ClientProtocol proxy.
+   * @param volumeName Name of the volume the bucket belongs to.
+   * @param bucketName Name of the bucket.
+   * @param acls ACLs associated with the bucket.
+   * @param storageType StorageType of the bucket.
+   * @param versioning versioning status of the bucket.
+   * @param creationTime creation time of the bucket.
+   */
+  public OzoneBucket(Configuration conf, ClientProtocol proxy,
+                     String volumeName, String bucketName,
+                     List<OzoneAcl> acls, StorageType storageType,
+                     Boolean versioning, long creationTime) {
+    this.proxy = proxy;
+    this.volumeName = volumeName;
+    this.name = bucketName;
+    this.acls = acls;
+    this.storageType = storageType;
+    this.versioning = versioning;
+    this.listCacheSize = HddsClientUtils.getListCacheSize(conf);
+    this.creationTime = creationTime;
+    this.defaultReplication = ReplicationFactor.valueOf(conf.getInt(
+        OzoneConfigKeys.OZONE_REPLICATION,
+        OzoneConfigKeys.OZONE_REPLICATION_DEFAULT));
+    this.defaultReplicationType = ReplicationType.valueOf(conf.get(
+        OzoneConfigKeys.OZONE_REPLICATION_TYPE,
+        OzoneConfigKeys.OZONE_REPLICATION_TYPE_DEFAULT));
+  }
+
+  /**
+   * Returns Volume Name.
+   *
+   * @return volumeName
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Returns Bucket Name.
+   *
+   * @return bucketName
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Returns the ACLs associated with the Bucket.
+   *
+   * @return acls
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Returns StorageType of the Bucket.
+   *
+   * @return storageType
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Returns Versioning associated with the Bucket.
+   *
+   * @return versioning
+   */
+  public Boolean getVersioning() {
+    return versioning;
+  }
+
+  /**
+   * Returns creation time of the Bucket.
+   *
+   * @return creation time of the bucket
+   */
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Adds ACLs to the Bucket.
+   * @param addAcls ACLs to be added
+   * @throws IOException
+   */
+  public void addAcls(List<OzoneAcl> addAcls) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(addAcls);
+    proxy.addBucketAcls(volumeName, name, addAcls);
+    addAcls.stream().filter(acl -> !acls.contains(acl)).forEach(
+        acls::add);
+  }
+
+  /**
+   * Removes ACLs from the bucket.
+   * @param removeAcls ACLs to be removed
+   * @throws IOException
+   */
+  public void removeAcls(List<OzoneAcl> removeAcls) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(removeAcls);
+    proxy.removeBucketAcls(volumeName, name, removeAcls);
+    acls.removeAll(removeAcls);
+  }
+
+  /**
+   * Sets/Changes the storage type of the bucket.
+   * @param newStorageType Storage type to be set
+   * @throws IOException
+   */
+  public void setStorageType(StorageType newStorageType) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(newStorageType);
+    proxy.setBucketStorageType(volumeName, name, newStorageType);
+    storageType = newStorageType;
+  }
+
+  /**
+   * Enables/Disables versioning of the bucket.
+   * @param newVersioning versioning status to be set
+   * @throws IOException
+   */
+  public void setVersioning(Boolean newVersioning) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(newVersioning);
+    proxy.setBucketVersioning(volumeName, name, newVersioning);
+    versioning = newVersioning;
+  }
+
+  /**
+   * Creates a new key in the bucket, using the client's configured default
+   * replication type and factor (RATIS and THREE unless overridden).
+   * @param key Name of the key to be created.
+   * @param size Size of the data the key will point to.
+   * @return OzoneOutputStream to which the data has to be written.
+   * @throws IOException
+   */
+  public OzoneOutputStream createKey(String key, long size)
+      throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(key);
+    return createKey(key, size, defaultReplicationType, defaultReplication);
+  }
+
+  /**
+   * Creates a new key in the bucket.
+   * @param key Name of the key to be created.
+   * @param size Size of the data the key will point to.
+   * @param type Replication type to be used.
+   * @param factor Replication factor of the key.
+   * @return OzoneOutputStream to which the data has to be written.
+   * @throws IOException
+   */
+  public OzoneOutputStream createKey(String key, long size,
+                                     ReplicationType type,
+                                     ReplicationFactor factor)
+      throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(key);
+    Preconditions.checkNotNull(type);
+    Preconditions.checkNotNull(factor);
+    return proxy.createKey(volumeName, name, key, size, type, factor);
+  }
+
+  /**
+   * Reads an existing key from the bucket.
+   * @param key Name of the key to be read.
+   * @return OzoneInputStream the stream using which the data can be read.
+   * @throws IOException
+   */
+  public OzoneInputStream readKey(String key) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(key);
+    return proxy.getKey(volumeName, name, key);
+  }
+
+  /**
+   * Returns information about the key.
+   * @param key Name of the key.
+   * @return OzoneKey Information about the key.
+   * @throws IOException
+   */
+  public OzoneKey getKey(String key) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(key);
+    return proxy.getKeyDetails(volumeName, name, key);
+  }
+
+  /**
+   * Returns Iterator to iterate over all keys in the bucket.
+   * The result can be restricted using a key prefix; all keys are
+   * returned if the key prefix is null.
+   *
+   * @param keyPrefix Key prefix to match
+   * @return {@code Iterator<OzoneKey>}
+   */
+  public Iterator<OzoneKey> listKeys(String keyPrefix) {
+    return new KeyIterator(keyPrefix);
+  }
+
+  /**
+   * Deletes key from the bucket.
+   * @param key Name of the key to be deleted.
+   * @throws IOException
+   */
+  public void deleteKey(String key) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(key);
+    proxy.deleteKey(volumeName, name, key);
+  }
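+
+  // A minimal, illustrative sketch of the key lifecycle using the methods
+  // above; the bucket instance, key name and payload are hypothetical:
+  //
+  //   byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
+  //   try (OzoneOutputStream out = bucket.createKey("key1", data.length,
+  //       ReplicationType.RATIS, ReplicationFactor.THREE)) {
+  //     out.write(data);
+  //   }
+  //   try (OzoneInputStream in = bucket.readKey("key1")) {
+  //     in.read(new byte[data.length]);
+  //   }
+  //   bucket.deleteKey("key1");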
+
+  /**
+   * An Iterator to iterate over {@link OzoneKey} list.
+   */
+  private class KeyIterator implements Iterator<OzoneKey> {
+
+    private String keyPrefix = null;
+
+    private Iterator<OzoneKey> currentIterator;
+    private OzoneKey currentValue;
+
+
+    /**
+     * Creates an Iterator to iterate over all keys in the bucket
+     * that match the key prefix.
+     * @param keyPrefix key prefix to match
+     */
+    KeyIterator(String keyPrefix) {
+      this.keyPrefix = keyPrefix;
+      this.currentValue = null;
+      this.currentIterator = getNextListOfKeys(null).iterator();
+    }
+
+    @Override
+    public boolean hasNext() {
+      if(!currentIterator.hasNext()) {
+        currentIterator = getNextListOfKeys(
+            currentValue != null ? currentValue.getName() : null)
+            .iterator();
+      }
+      return currentIterator.hasNext();
+    }
+
+    @Override
+    public OzoneKey next() {
+      if(hasNext()) {
+        currentValue = currentIterator.next();
+        return currentValue;
+      }
+      throw new NoSuchElementException();
+    }
+
+    /**
+     * Returns the next batch of keys using the proxy.
+     * @param prevKey previous key; it is excluded from the result
+     * @return {@code List<OzoneKey>}
+     */
+    private List<OzoneKey> getNextListOfKeys(String prevKey) {
+      try {
+        return proxy.listKeys(volumeName, name, keyPrefix, prevKey,
+            listCacheSize);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClient.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClient.java
new file mode 100644
index 0000000..f191507
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClient.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * OzoneClient connects to an Ozone cluster and
+ * performs basic operations.
+ */
+public class OzoneClient implements Closeable {
+
+  /*
+   * OzoneClient connects to an Ozone cluster and
+   * performs basic operations.
+   *
+   * +-------------+     +---+   +-------------------------------------+
+   * | OzoneClient | --> | C |   | Object Store                        |
+   * |_____________|     | l |   |  +-------------------------------+  |
+   *                     | i |   |  | Volume(s)                     |  |
+   *                     | e |   |  |   +------------------------+  |  |
+   *                     | n |   |  |   | Bucket(s)              |  |  |
+   *                     | t |   |  |   |   +------------------+ |  |  |
+   *                     |   |   |  |   |   | Key -> Value (s) | |  |  |
+   *                     | P |-->|  |   |   |                  | |  |  |
+   *                     | r |   |  |   |   |__________________| |  |  |
+   *                     | o |   |  |   |                        |  |  |
+   *                     | t |   |  |   |________________________|  |  |
+   *                     | o |   |  |                               |  |
+   *                     | c |   |  |_______________________________|  |
+   *                     | o |   |                                     |
+   *                     | l |   |_____________________________________|
+   *                     |___|
+   * Example:
+   * ObjectStore store = client.getObjectStore();
+   * store.createVolume("volume one", volumeArgs);
+   * OzoneVolume volume = store.getVolume("volume one");
+   * volume.setQuota(OzoneQuota.parseQuota("10 GB"));
+   * volume.createBucket("bucket one", bucketArgs);
+   * OzoneBucket bucket = volume.getBucket("bucket one");
+   * bucket.setVersioning(true);
+   * OzoneOutputStream os = bucket.createKey("key one", 1024);
+   * os.write(data);
+   * os.close();
+   * OzoneInputStream is = bucket.readKey("key one");
+   * is.read(data);
+   * is.close();
+   * bucket.deleteKey("key one");
+   * volume.deleteBucket("bucket one");
+   * store.deleteVolume("volume one");
+   * client.close();
+   */
+
+  private final ClientProtocol proxy;
+  private final ObjectStore objectStore;
+
+  /**
+   * Creates a new OzoneClient object, generally constructed
+   * using {@link OzoneClientFactory}.
+   * @param conf Configuration object
+   * @param proxy ClientProtocol proxy instance
+   */
+  public OzoneClient(Configuration conf, ClientProtocol proxy) {
+    this.proxy = proxy;
+    this.objectStore = new ObjectStore(conf, this.proxy);
+  }
+
+  /**
+   * Returns the object store associated with the Ozone Cluster.
+   * @return ObjectStore
+   */
+  public ObjectStore getObjectStore() {
+    return objectStore;
+  }
+
+  /**
+   * Closes the client and all the underlying resources.
+   * @throws IOException
+   */
+  @Override
+  public void close() throws IOException {
+    proxy.close();
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientFactory.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientFactory.java
new file mode 100644
index 0000000..6b24f2a
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientFactory.java
@@ -0,0 +1,307 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.ozone.client.rest.RestClient;
+import org.apache.hadoop.ozone.client.rpc.RpcClient;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Proxy;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_CLIENT_PROTOCOL;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_HTTP_ADDRESS_KEY;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_HTTP_BIND_PORT_DEFAULT;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KSM_ADDRESS_KEY;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KSM_PORT_DEFAULT;
+
+/**
+ * Factory class to create different types of OzoneClients.
+ * Based on <code>ozone.client.protocol</code>, it decides which
+ * protocol to use for the communication.
+ * Default value is
+ * <code>org.apache.hadoop.ozone.client.rpc.RpcClient</code>.<br>
+ * OzoneClientFactory constructs a proxy using
+ * {@link OzoneClientInvocationHandler}
+ * and creates OzoneClient instance with it.
+ * {@link OzoneClientInvocationHandler} dispatches the call to
+ * underlying {@link ClientProtocol} implementation.
+ */
+public final class OzoneClientFactory {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      OzoneClientFactory.class);
+
+  /**
+   * Private constructor; this class is not meant to be instantiated.
+   */
+  private OzoneClientFactory(){}
+
+
+  /**
+   * Constructs and returns an OzoneClient with the default configuration.
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getClient() throws IOException {
+    LOG.info("Creating OzoneClient with default configuration.");
+    return getClient(new OzoneConfiguration());
+  }
+
+  /**
+   * Constructs and returns an OzoneClient based on the configuration object.
+   * Protocol type is decided by <code>ozone.client.protocol</code>.
+   *
+   * @param config
+   *        Configuration to be used for OzoneClient creation
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getClient(Configuration config)
+      throws IOException {
+    Preconditions.checkNotNull(config);
+    Class<? extends ClientProtocol> clazz = (Class<? extends ClientProtocol>)
+        config.getClass(OZONE_CLIENT_PROTOCOL, RpcClient.class);
+    return getClient(getClientProtocol(clazz, config), config);
+  }
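+
+  // A minimal sketch of protocol selection through the configuration; the
+  // key is referenced via the OZONE_CLIENT_PROTOCOL constant imported above:
+  //
+  //   OzoneConfiguration conf = new OzoneConfiguration();
+  //   conf.setClass(OZONE_CLIENT_PROTOCOL, RestClient.class,
+  //       ClientProtocol.class);
+  //   OzoneClient client = OzoneClientFactory.getClient(conf);
+  //
+  // When the key is not set, RpcClient is used as the default protocol.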
+
+  /**
+   * Returns an OzoneClient which will use RPC protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRpcClient(String ksmHost)
+      throws IOException {
+    return getRpcClient(ksmHost, OZONE_KSM_PORT_DEFAULT,
+        new OzoneConfiguration());
+  }
+
+  /**
+   * Returns an OzoneClient which will use RPC protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @param ksmRpcPort
+   *        RPC port of KeySpaceManager.
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRpcClient(String ksmHost, Integer ksmRpcPort)
+      throws IOException {
+    return getRpcClient(ksmHost, ksmRpcPort, new OzoneConfiguration());
+  }
+
+  /**
+   * Returns an OzoneClient which will use RPC protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @param ksmRpcPort
+   *        RPC port of KeySpaceManager.
+   *
+   * @param config
+   *        Configuration to be used for OzoneClient creation
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRpcClient(String ksmHost, Integer ksmRpcPort,
+                                         Configuration config)
+      throws IOException {
+    Preconditions.checkNotNull(ksmHost);
+    Preconditions.checkNotNull(ksmRpcPort);
+    Preconditions.checkNotNull(config);
+    config.set(OZONE_KSM_ADDRESS_KEY, ksmHost + ":" + ksmRpcPort);
+    return getRpcClient(config);
+  }
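+
+  // A minimal sketch of connecting over RPC to a specific KeySpaceManager;
+  // the host name and port below are hypothetical placeholders:
+  //
+  //   OzoneClient client =
+  //       OzoneClientFactory.getRpcClient("ksm.example.com", 9862);
+  //   ObjectStore store = client.getObjectStore();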
+
+  /**
+   * Returns an OzoneClient which will use RPC protocol.
+   *
+   * @param config
+   *        used for OzoneClient creation
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRpcClient(Configuration config)
+      throws IOException {
+    Preconditions.checkNotNull(config);
+    return getClient(getClientProtocol(RpcClient.class, config),
+        config);
+  }
+
+  /**
+   * Returns an OzoneClient which will use REST protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRestClient(String ksmHost)
+      throws IOException {
+    return getRestClient(ksmHost, OZONE_KSM_HTTP_BIND_PORT_DEFAULT);
+  }
+
+  /**
+   * Returns an OzoneClient which will use REST protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @param ksmHttpPort
+   *        HTTP port of KeySpaceManager.
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRestClient(String ksmHost, Integer ksmHttpPort)
+      throws IOException {
+    return getRestClient(ksmHost, ksmHttpPort, new OzoneConfiguration());
+  }
+
+  /**
+   * Returns an OzoneClient which will use REST protocol.
+   *
+   * @param ksmHost
+   *        hostname of the KeySpaceManager to connect to.
+   *
+   * @param ksmHttpPort
+   *        HTTP port of KeySpaceManager.
+   *
+   * @param config
+   *        Configuration to be used for OzoneClient creation
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRestClient(String ksmHost, Integer ksmHttpPort,
+                                          Configuration config)
+      throws IOException {
+    Preconditions.checkNotNull(ksmHost);
+    Preconditions.checkNotNull(ksmHttpPort);
+    Preconditions.checkNotNull(config);
+    config.set(OZONE_KSM_HTTP_ADDRESS_KEY, ksmHost + ":" +  ksmHttpPort);
+    return getRestClient(config);
+  }
+
+  /**
+   * Returns an OzoneClient which will use REST protocol.
+   *
+   * @param config
+   *        Configuration to be used for OzoneClient creation
+   *
+   * @return OzoneClient
+   *
+   * @throws IOException
+   */
+  public static OzoneClient getRestClient(Configuration config)
+      throws IOException {
+    Preconditions.checkNotNull(config);
+    return getClient(getClientProtocol(RestClient.class, config),
+        config);
+  }
+
+  /**
+   * Creates OzoneClient with the given ClientProtocol and Configuration.
+   *
+   * @param clientProtocol
+   *        Protocol to be used by the OzoneClient
+   *
+   * @param config
+   *        Configuration to be used for OzoneClient creation
+   */
+  private static OzoneClient getClient(ClientProtocol clientProtocol,
+                                       Configuration config) {
+    OzoneClientInvocationHandler clientHandler =
+        new OzoneClientInvocationHandler(clientProtocol);
+    ClientProtocol proxy = (ClientProtocol) Proxy.newProxyInstance(
+        OzoneClientInvocationHandler.class.getClassLoader(),
+        new Class<?>[]{ClientProtocol.class}, clientHandler);
+    return new OzoneClient(config, proxy);
+  }
+
+  /**
+   * Returns an instance of Protocol class.
+   *
+   * @param protocolClass
+   *        Class object of the ClientProtocol.
+   *
+   * @param config
+   *        Configuration used to initialize ClientProtocol.
+   *
+   * @return ClientProtocol
+   *
+   * @throws IOException
+   */
+  private static ClientProtocol getClientProtocol(
+      Class<? extends ClientProtocol> protocolClass, Configuration config)
+      throws IOException {
+    try {
+      LOG.debug("Using {} as client protocol.",
+          protocolClass.getCanonicalName());
+      Constructor<? extends ClientProtocol> ctor =
+          protocolClass.getConstructor(Configuration.class);
+      return ctor.newInstance(config);
+    } catch (Exception e) {
+      final String message = "Couldn't create protocol " + protocolClass;
+      LOG.error(message + " exception:" + e);
+      if (e.getCause() instanceof IOException) {
+        throw (IOException) e.getCause();
+      } else if (e instanceof InvocationTargetException) {
+        throw new IOException(message,
+            ((InvocationTargetException) e).getTargetException());
+      } else {
+        throw new IOException(message, e);
+      }
+    }
+  }
+
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientInvocationHandler.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientInvocationHandler.java
new file mode 100644
index 0000000..3051e2d
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientInvocationHandler.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * Invocation Handler for ozone client which dispatches the call to underlying
+ * ClientProtocol implementation.
+ */
+public class OzoneClientInvocationHandler implements InvocationHandler {
+
+
+  private static final Logger LOG = LoggerFactory.getLogger(OzoneClient.class);
+  private final ClientProtocol target;
+
+  /**
+   * Constructs OzoneClientInvocationHandler with the proxy.
+   * @param target proxy to be used for method invocation.
+   */
+  public OzoneClientInvocationHandler(ClientProtocol target) {
+    this.target = target;
+  }
+
+  @Override
+  public Object invoke(Object proxy, Method method, Object[] args)
+      throws Throwable {
+    LOG.trace("Invoking method {} on proxy {}", method, proxy);
+    try {
+      long startTime = Time.monotonicNow();
+      Object result = method.invoke(target, args);
+      LOG.debug("Call: {} took {} ms", method,
+          Time.monotonicNow() - startTime);
+      return result;
+    } catch(InvocationTargetException iEx) {
+      throw iEx.getCause();
+    }
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneKey.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneKey.java
new file mode 100644
index 0000000..0c723dd
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneKey.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+/**
+ * A class that encapsulates OzoneKey.
+ */
+public class OzoneKey {
+
+  /**
+   * Name of the Volume the Key belongs to.
+   */
+  private final String volumeName;
+  /**
+   * Name of the Bucket the Key belongs to.
+   */
+  private final String bucketName;
+  /**
+   * Name of the Key.
+   */
+  private final String name;
+  /**
+   * Size of the data.
+   */
+  private final long dataSize;
+  /**
+   * Creation time of the key.
+   */
+  private long creationTime;
+  /**
+   * Modification time of the key.
+   */
+  private long modificationTime;
+
+  /**
+   * Constructs an OzoneKey instance from the given volume name, bucket name,
+   * key name, data size, creation time and modification time.
+   */
+  public OzoneKey(String volumeName, String bucketName,
+                  String keyName, long size, long creationTime,
+                  long modificationTime) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+    this.name = keyName;
+    this.dataSize = size;
+    this.creationTime = creationTime;
+    this.modificationTime = modificationTime;
+  }
+
+  /**
+   * Returns Volume Name associated with the Key.
+   *
+   * @return volumeName
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Returns Bucket Name associated with the Key.
+   *
+   * @return bucketName
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Returns the Key Name.
+   *
+   * @return keyName
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Returns the size of the data.
+   *
+   * @return dataSize
+   */
+  public long getDataSize() {
+    return dataSize;
+  }
+
+  /**
+   * Returns the creation time of the key.
+   *
+   * @return creation time
+   */
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns the modification time of the key.
+   *
+   * @return modification time
+   */
+  public long getModificationTime() {
+    return modificationTime;
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneVolume.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneVolume.java
new file mode 100644
index 0000000..c4e7331
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/OzoneVolume.java
@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * A class that encapsulates OzoneVolume.
+ */
+public class OzoneVolume {
+
+  /**
+   * The proxy used for connecting to the cluster and performing
+   * client operations.
+   */
+  private final ClientProtocol proxy;
+
+  /**
+   * Name of the Volume.
+   */
+  private final String name;
+
+  /**
+   * Admin Name of the Volume.
+   */
+  private String admin;
+  /**
+   * Owner of the Volume.
+   */
+  private String owner;
+  /**
+   * Quota allocated for the Volume.
+   */
+  private long quotaInBytes;
+  /**
+   * Creation time of the volume.
+   */
+  private long creationTime;
+  /**
+   * Volume ACLs.
+   */
+  private List<OzoneAcl> acls;
+
+  private int listCacheSize;
+
+  /**
+   * Constructs OzoneVolume instance.
+   * @param conf Configuration object.
+   * @param proxy ClientProtocol proxy.
+   * @param name Name of the volume.
+   * @param admin Volume admin.
+   * @param owner Volume owner.
+   * @param quotaInBytes Volume quota in bytes.
+   * @param creationTime creation time of the volume
+   * @param acls ACLs associated with the volume.
+   */
+  public OzoneVolume(Configuration conf, ClientProtocol proxy, String name,
+                     String admin, String owner, long quotaInBytes,
+                     long creationTime, List<OzoneAcl> acls) {
+    this.proxy = proxy;
+    this.name = name;
+    this.admin = admin;
+    this.owner = owner;
+    this.quotaInBytes = quotaInBytes;
+    this.creationTime = creationTime;
+    this.acls = acls;
+    this.listCacheSize = HddsClientUtils.getListCacheSize(conf);
+  }
+
+  /**
+   * Returns Volume name.
+   *
+   * @return volumeName
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Returns Volume's admin name.
+   *
+   * @return adminName
+   */
+  public String getAdmin() {
+    return admin;
+  }
+
+  /**
+   * Returns Volume's owner name.
+   *
+   * @return ownerName
+   */
+  public String getOwner() {
+    return owner;
+  }
+
+  /**
+   * Returns Quota allocated for the Volume in bytes.
+   *
+   * @return quotaInBytes
+   */
+  public long getQuota() {
+    return quotaInBytes;
+  }
+
+  /**
+   * Returns creation time of the volume.
+   *
+   * @return creation time.
+   */
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns OzoneAcl list associated with the Volume.
+   *
+   * @return acls
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Sets/Changes the owner of this Volume.
+   * @param owner new owner
+   * @throws IOException
+   */
+  public void setOwner(String owner) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(owner);
+    proxy.setVolumeOwner(name, owner);
+    this.owner = owner;
+  }
+
+  /**
+   * Sets/Changes the quota of this Volume.
+   * @param quota new quota
+   * @throws IOException
+   */
+  public void setQuota(OzoneQuota  quota) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(quota);
+    proxy.setVolumeQuota(name, quota);
+    this.quotaInBytes = quota.sizeInBytes();
+  }
+
+  /**
+   * Creates a new Bucket in this Volume, with default values.
+   * @param bucketName Name of the Bucket
+   * @throws IOException
+   */
+  public void createBucket(String bucketName)
+      throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(bucketName);
+    HddsClientUtils.verifyResourceName(bucketName);
+    proxy.createBucket(name, bucketName);
+  }
+
+  /**
+   * Creates a new Bucket in this Volume, with properties set in bucketArgs.
+   * @param bucketName Name of the Bucket
+   * @param bucketArgs Properties to be set
+   * @throws IOException
+   */
+  public void createBucket(String bucketName, BucketArgs bucketArgs)
+      throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(bucketArgs);
+    HddsClientUtils.verifyResourceName(bucketName);
+    proxy.createBucket(name, bucketName, bucketArgs);
+  }
+
+  /**
+   * Get the Bucket from this Volume.
+   * @param bucketName Name of the Bucket
+   * @return OzoneBucket
+   * @throws IOException
+   */
+  public OzoneBucket getBucket(String bucketName) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(bucketName);
+    HddsClientUtils.verifyResourceName(bucketName);
+    return proxy.getBucketDetails(name, bucketName);
+  }
+
+  /**
+   * Returns Iterator to iterate over all buckets in the volume.
+   * The result can be restricted using a bucket prefix; all buckets are
+   * returned if the bucket prefix is null.
+   *
+   * @param bucketPrefix Bucket prefix to match
+   * @return {@code Iterator<OzoneBucket>}
+   */
+  public Iterator<OzoneBucket> listBuckets(String bucketPrefix) {
+    return new BucketIterator(bucketPrefix);
+  }
+
+  /**
+   * Deletes the Bucket from this Volume.
+   * @param bucketName Name of the Bucket
+   * @throws IOException
+   */
+  public void deleteBucket(String bucketName) throws IOException {
+    Preconditions.checkNotNull(proxy, "Client proxy is not set.");
+    Preconditions.checkNotNull(bucketName);
+    HddsClientUtils.verifyResourceName(bucketName);
+    proxy.deleteBucket(name, bucketName);
+  }
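+
+  // A minimal, illustrative sketch of the bucket lifecycle using the methods
+  // above; the volume instance and bucket names are hypothetical:
+  //
+  //   volume.createBucket("bucket1");
+  //   OzoneBucket bucket = volume.getBucket("bucket1");
+  //   Iterator<OzoneBucket> buckets = volume.listBuckets("buck");
+  //   while (buckets.hasNext()) {
+  //     System.out.println(buckets.next().getName());
+  //   }
+  //   volume.deleteBucket("bucket1");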
+
+
+  /**
+   * An Iterator to iterate over {@link OzoneBucket} list.
+   */
+  private class BucketIterator implements Iterator<OzoneBucket> {
+
+    private String bucketPrefix = null;
+
+    private Iterator<OzoneBucket> currentIterator;
+    private OzoneBucket currentValue;
+
+
+    /**
+     * Creates an Iterator to iterate over all buckets in the volume
+     * that match the bucket prefix.
+     * @param bucketPrefix bucket prefix to match
+     */
+    BucketIterator(String bucketPrefix) {
+      this.bucketPrefix = bucketPrefix;
+      this.currentValue = null;
+      this.currentIterator = getNextListOfBuckets(null).iterator();
+    }
+
+    @Override
+    public boolean hasNext() {
+      if(!currentIterator.hasNext()) {
+        currentIterator = getNextListOfBuckets(
+            currentValue != null ? currentValue.getName() : null)
+            .iterator();
+      }
+      return currentIterator.hasNext();
+    }
+
+    @Override
+    public OzoneBucket next() {
+      if(hasNext()) {
+        currentValue = currentIterator.next();
+        return currentValue;
+      }
+      throw new NoSuchElementException();
+    }
+
+    /**
+     * Returns the next batch of buckets using the proxy.
+     * @param prevBucket previous bucket; it is excluded from the result
+     * @return {@code List<OzoneBucket>}
+     */
+    private List<OzoneBucket> getNextListOfBuckets(String prevBucket) {
+      try {
+        return proxy.listBuckets(name, bucketPrefix, prevBucket, listCacheSize);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/VolumeArgs.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/VolumeArgs.java
new file mode 100644
index 0000000..f1aa031
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/VolumeArgs.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import org.apache.hadoop.ozone.OzoneAcl;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * This class encapsulates the arguments that are
+ * required for creating a volume.
+ */
+public final class VolumeArgs {
+
+  private final String admin;
+  private final String owner;
+  private final String quota;
+  private final List<OzoneAcl> acls;
+
+  /**
+   * Private constructor, constructed via builder.
+   * @param admin Administrator's name.
+   * @param owner Volume owner's name.
+   * @param quota Volume Quota.
+   * @param acls List of ACLs associated with the volume.
+   */
+  private VolumeArgs(String admin, String owner,
+                        String quota, List<OzoneAcl> acls) {
+    this.admin = admin;
+    this.owner = owner;
+    this.quota = quota;
+    this.acls = acls;
+  }
+
+  /**
+   * Returns the Admin Name.
+   * @return String.
+   */
+  public String getAdmin() {
+    return admin;
+  }
+
+  /**
+   * Returns the owner Name.
+   * @return String
+   */
+  public String getOwner() {
+    return owner;
+  }
+
+  /**
+   * Returns Volume Quota.
+   * @return Quota.
+   */
+  public String getQuota() {
+    return quota;
+  }
+
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+  /**
+   * Returns a new Builder that builds a VolumeArgs.
+   *
+   * @return Builder
+   */
+  public static VolumeArgs.Builder newBuilder() {
+    return new VolumeArgs.Builder();
+  }
+
+  /**
+   * Builder for VolumeArgs.
+   */
+  public static class Builder {
+    private String adminName;
+    private String ownerName;
+    private String volumeQuota;
+    private List<OzoneAcl> listOfAcls;
+
+
+    public VolumeArgs.Builder setAdmin(String admin) {
+      this.adminName = admin;
+      return this;
+    }
+
+    public VolumeArgs.Builder setOwner(String owner) {
+      this.ownerName = owner;
+      return this;
+    }
+
+    public VolumeArgs.Builder setQuota(String quota) {
+      this.volumeQuota = quota;
+      return this;
+    }
+
+    public VolumeArgs.Builder setAcls(List<OzoneAcl> acls)
+        throws IOException {
+      this.listOfAcls = acls;
+      return this;
+    }
+
+    /**
+     * Constructs a VolumeArgs instance.
+     * @return VolumeArgs
+     */
+    public VolumeArgs build() {
+      return new VolumeArgs(adminName, ownerName, volumeQuota, listOfAcls);
+    }
+  }
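+
+  // A minimal sketch of building a VolumeArgs instance with the builder
+  // above; the admin, owner and quota values are hypothetical:
+  //
+  //   VolumeArgs args = VolumeArgs.newBuilder()
+  //       .setAdmin("hdfs")
+  //       .setOwner("alice")
+  //       .setQuota("10GB")
+  //       .build();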
+
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupInputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupInputStream.java
new file mode 100644
index 0000000..b82ed25
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupInputStream.java
@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.io;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.fs.FSExceptionMessages;
+import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ChunkInputStream;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.apache.ratis.util.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Maintains a list of ChunkInputStreams and serves reads based on the
+ * offset within the key.
+ */
+public class ChunkGroupInputStream extends InputStream implements Seekable {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ChunkGroupInputStream.class);
+
+  private static final int EOF = -1;
+
+  private final ArrayList<ChunkInputStreamEntry> streamEntries;
+  // streamOffset[i] stores the offset at which chunkInputStream i stores
+  // data in the key
+  private long[] streamOffset = null;
+  private int currentStreamIndex;
+  private long length = 0;
+  private boolean closed = false;
+  private String key;
+
+  public ChunkGroupInputStream() {
+    streamEntries = new ArrayList<>();
+    currentStreamIndex = 0;
+  }
+
+  @VisibleForTesting
+  public synchronized int getCurrentStreamIndex() {
+    return currentStreamIndex;
+  }
+
+  @VisibleForTesting
+  public long getRemainingOfIndex(int index) {
+    return streamEntries.get(index).getRemaining();
+  }
+
+  /**
+   * Appends another stream to the end of the list.
+   *
+   * @param stream       the stream instance.
+   * @param streamLength the max number of bytes that can be read from this
+   *                     stream.
+   */
+  public synchronized void addStream(ChunkInputStream stream,
+      long streamLength) {
+    streamEntries.add(new ChunkInputStreamEntry(stream, streamLength));
+  }
+
+
+  @Override
+  public synchronized int read() throws IOException {
+    byte[] buf = new byte[1];
+    if (read(buf, 0, 1) == EOF) {
+      return EOF;
+    }
+    return Byte.toUnsignedInt(buf[0]);
+  }
+
+  @Override
+  public synchronized int read(byte[] b, int off, int len) throws IOException {
+    checkNotClosed();
+    if (b == null) {
+      throw new NullPointerException();
+    }
+    if (off < 0 || len < 0 || len > b.length - off) {
+      throw new IndexOutOfBoundsException();
+    }
+    if (len == 0) {
+      return 0;
+    }
+    int totalReadLen = 0;
+    while (len > 0) {
+      if (streamEntries.size() <= currentStreamIndex) {
+        return totalReadLen == 0 ? EOF : totalReadLen;
+      }
+      ChunkInputStreamEntry current = streamEntries.get(currentStreamIndex);
+      int readLen = Math.min(len, (int)current.getRemaining());
+      int actualLen = current.read(b, off, readLen);
+      // this means the underlying stream has nothing at all, return
+      if (actualLen == EOF) {
+        return totalReadLen > 0 ? totalReadLen : EOF;
+      }
+      totalReadLen += actualLen;
+      // this means there is no more data to read beyond this point, return
+      if (actualLen != readLen) {
+        return totalReadLen;
+      }
+      off += readLen;
+      len -= readLen;
+      if (current.getRemaining() <= 0) {
+        currentStreamIndex += 1;
+      }
+    }
+    return totalReadLen;
+  }
+
+  @Override
+  public void seek(long pos) throws IOException {
+    checkNotClosed();
+    if (pos < 0 || pos >= length) {
+      if (pos == 0) {
+        // It is possible for length and pos to be zero in which case
+        // seek should return instead of throwing exception
+        return;
+      }
+      throw new EOFException(
+          "EOF encountered at pos: " + pos + " for key: " + key);
+    }
+    Preconditions.assertTrue(currentStreamIndex >= 0);
+    if (currentStreamIndex >= streamEntries.size()) {
+      currentStreamIndex = Arrays.binarySearch(streamOffset, pos);
+    } else if (pos < streamOffset[currentStreamIndex]) {
+      currentStreamIndex =
+          Arrays.binarySearch(streamOffset, 0, currentStreamIndex, pos);
+    } else if (pos >= streamOffset[currentStreamIndex] + streamEntries
+        .get(currentStreamIndex).length) {
+      currentStreamIndex = Arrays
+          .binarySearch(streamOffset, currentStreamIndex + 1,
+              streamEntries.size(), pos);
+    }
+    if (currentStreamIndex < 0) {
+      // Binary search returns -insertionPoint - 1  if element is not present
+      // in the array. insertionPoint is the point at which element would be
+      // inserted in the sorted array. We need to adjust the currentStreamIndex
+      // accordingly so that currentStreamIndex = insertionPoint - 1
+      currentStreamIndex = -currentStreamIndex - 2;
+    }
+    // seek to the proper offset in the ChunkInputStream
+    streamEntries.get(currentStreamIndex)
+        .seek(pos - streamOffset[currentStreamIndex]);
+  }
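+
+  // A small worked example of the offset bookkeeping above (the numbers are
+  // illustrative only): with three chunk streams of lengths 100, 150 and 50,
+  // streamOffset is {0, 100, 250}. seek(120) binary-searches streamOffset and
+  // gets insertion point 2, so currentStreamIndex becomes 1, and the entry at
+  // index 1 is asked to seek to 120 - 100 = 20 within its own chunks.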
+
+  @Override
+  public long getPos() throws IOException {
+    return length == 0 ? 0 :
+        streamOffset[currentStreamIndex] + streamEntries.get(currentStreamIndex)
+            .getPos();
+  }
+
+  @Override
+  public boolean seekToNewSource(long targetPos) throws IOException {
+    return false;
+  }
+
+  @Override
+  public int available() throws IOException {
+    checkNotClosed();
+    long remaining = length - getPos();
+    return remaining <= Integer.MAX_VALUE ? (int) remaining : Integer.MAX_VALUE;
+  }
+
+  @Override
+  public void close() throws IOException {
+    closed = true;
+    for (int i = 0; i < streamEntries.size(); i++) {
+      streamEntries.get(i).close();
+    }
+  }
+
+  /**
+   * Encapsulates ChunkInputStream.
+   */
+  public static class ChunkInputStreamEntry extends InputStream
+      implements Seekable {
+
+    private final ChunkInputStream chunkInputStream;
+    private final long length;
+    private long currentPosition;
+
+    public ChunkInputStreamEntry(ChunkInputStream chunkInputStream,
+        long length) {
+      this.chunkInputStream = chunkInputStream;
+      this.length = length;
+      this.currentPosition = 0;
+    }
+
+    synchronized long getRemaining() {
+      return length - currentPosition;
+    }
+
+    @Override
+    public synchronized int read(byte[] b, int off, int len)
+        throws IOException {
+      int readLen = chunkInputStream.read(b, off, len);
+      // Only advance the position when bytes were actually read (not on EOF).
+      if (readLen != EOF) {
+        currentPosition += readLen;
+      }
+      return readLen;
+    }
+
+    @Override
+    public synchronized int read() throws IOException {
+      int data = chunkInputStream.read();
+      // Only advance the position when a byte was actually read (not on EOF).
+      if (data != EOF) {
+        currentPosition += 1;
+      }
+      return data;
+    }
+
+    @Override
+    public synchronized void close() throws IOException {
+      chunkInputStream.close();
+    }
+
+    @Override
+    public void seek(long pos) throws IOException {
+      chunkInputStream.seek(pos);
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      return chunkInputStream.getPos();
+    }
+
+    @Override
+    public boolean seekToNewSource(long targetPos) throws IOException {
+      return false;
+    }
+  }
+
+  public static LengthInputStream getFromKsmKeyInfo(KsmKeyInfo keyInfo,
+      XceiverClientManager xceiverClientManager,
+      StorageContainerLocationProtocolClientSideTranslatorPB
+          storageContainerLocationClient, String requestId)
+      throws IOException {
+    long length = 0;
+    String containerKey;
+    ChunkGroupInputStream groupInputStream = new ChunkGroupInputStream();
+    groupInputStream.key = keyInfo.getKeyName();
+    List<KsmKeyLocationInfo> keyLocationInfos =
+        keyInfo.getLatestVersionLocations().getBlocksLatestVersionOnly();
+    groupInputStream.streamOffset = new long[keyLocationInfos.size()];
+    for (int i = 0; i < keyLocationInfos.size(); i++) {
+      KsmKeyLocationInfo ksmKeyLocationInfo = keyLocationInfos.get(i);
+      String containerName = ksmKeyLocationInfo.getContainerName();
+      Pipeline pipeline =
+          storageContainerLocationClient.getContainer(containerName);
+      XceiverClientSpi xceiverClient =
+          xceiverClientManager.acquireClient(pipeline);
+      boolean success = false;
+      containerKey = ksmKeyLocationInfo.getBlockID();
+      try {
+        LOG.debug("get key accessing {} {}",
+            xceiverClient.getPipeline().getContainerName(), containerKey);
+        groupInputStream.streamOffset[i] = length;
+        ContainerProtos.KeyData containerKeyData = OzoneContainerTranslation
+            .containerKeyDataForRead(
+                xceiverClient.getPipeline().getContainerName(), containerKey);
+        ContainerProtos.GetKeyResponseProto response = ContainerProtocolCalls
+            .getKey(xceiverClient, containerKeyData, requestId);
+        List<ContainerProtos.ChunkInfo> chunks =
+            response.getKeyData().getChunksList();
+        for (ContainerProtos.ChunkInfo chunk : chunks) {
+          length += chunk.getLen();
+        }
+        success = true;
+        ChunkInputStream inputStream = new ChunkInputStream(
+            containerKey, xceiverClientManager, xceiverClient,
+            chunks, requestId);
+        groupInputStream.addStream(inputStream,
+            ksmKeyLocationInfo.getLength());
+      } finally {
+        if (!success) {
+          xceiverClientManager.releaseClient(xceiverClient);
+        }
+      }
+    }
+    groupInputStream.length = length;
+    return new LengthInputStream(groupInputStream, length);
+  }
+
+  /**
+   * Verifies that the input stream is open. Non-blocking; this checks
+   * the last observed state of the {@link #closed} field.
+   * @throws IOException if the connection is closed.
+   */
+  private void checkNotClosed() throws IOException {
+    if (closed) {
+      throw new IOException(
+          ": " + FSExceptionMessages.STREAM_IS_CLOSED + " Key: " + key);
+    }
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupOutputStream.java
new file mode 100644
index 0000000..4c465d3
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ChunkGroupOutputStream.java
@@ -0,0 +1,495 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.io;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.FSExceptionMessages;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Result;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ChunkOutputStream;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Maintains a list of ChunkOutputStream instances. Writes are based on offset.
+ *
+ * Note that a single write call may write to multiple containers. If the
+ * write to the first container succeeds but a later one fails, the writes
+ * that already succeeded are not rolled back.
+ *
+ * TODO : multi-threaded access is currently not supported.
+ */
+public class ChunkGroupOutputStream extends OutputStream {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ChunkGroupOutputStream.class);
+
+  // array list's get(index) is O(1)
+  private final ArrayList<ChunkOutputStreamEntry> streamEntries;
+  private int currentStreamIndex;
+  private long byteOffset;
+  private final KeySpaceManagerProtocolClientSideTranslatorPB ksmClient;
+  private final
+      StorageContainerLocationProtocolClientSideTranslatorPB scmClient;
+  private final KsmKeyArgs keyArgs;
+  private final int openID;
+  private final XceiverClientManager xceiverClientManager;
+  private final int chunkSize;
+  private final String requestID;
+  private boolean closed;
+
+  /**
+   * A constructor for testing purposes only.
+   */
+  @VisibleForTesting
+  public ChunkGroupOutputStream() {
+    streamEntries = new ArrayList<>();
+    ksmClient = null;
+    scmClient = null;
+    keyArgs = null;
+    openID = -1;
+    xceiverClientManager = null;
+    chunkSize = 0;
+    requestID = null;
+    closed = false;
+  }
+
+  /**
+   * For testing purposes only. The output stream is not built from blocks;
+   * it is supplied externally.
+   *
+   * @param outputStream an existing writable output stream
+   * @param length the length of data to write to the stream
+   */
+  @VisibleForTesting
+  public synchronized void addStream(OutputStream outputStream, long length) {
+    streamEntries.add(new ChunkOutputStreamEntry(outputStream, length));
+  }
+
+  @VisibleForTesting
+  public List<ChunkOutputStreamEntry> getStreamEntries() {
+    return streamEntries;
+  }
+
+  public ChunkGroupOutputStream(
+      OpenKeySession handler, XceiverClientManager xceiverClientManager,
+      StorageContainerLocationProtocolClientSideTranslatorPB scmClient,
+      KeySpaceManagerProtocolClientSideTranslatorPB ksmClient,
+      int chunkSize, String requestId, ReplicationFactor factor,
+      ReplicationType type) throws IOException {
+    this.streamEntries = new ArrayList<>();
+    this.currentStreamIndex = 0;
+    this.byteOffset = 0;
+    this.ksmClient = ksmClient;
+    this.scmClient = scmClient;
+    KsmKeyInfo info = handler.getKeyInfo();
+    this.keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(info.getVolumeName())
+        .setBucketName(info.getBucketName())
+        .setKeyName(info.getKeyName())
+        .setType(type)
+        .setFactor(factor)
+        .setDataSize(info.getDataSize()).build();
+    this.openID = handler.getId();
+    this.xceiverClientManager = xceiverClientManager;
+    this.chunkSize = chunkSize;
+    this.requestID = requestId;
+    LOG.debug("Expecting open key with one block, but got" +
+        info.getKeyLocationVersions().size());
+  }
+
+  /**
+   * When a key is opened, some blocks may already have been allocated to it
+   * for this open session. To make use of these blocks, they need to be added
+   * to the stream entries. However, a key's version also includes blocks from
+   * previous versions; those old blocks must not be added to the stream
+   * entries, because they should not be picked for writes. Therefore this
+   * method adds only the blocks created in this particular open version to
+   * the stream entries.
+   *
+   * @param version the set of blocks that are pre-allocated.
+   * @param openVersion the version corresponding to the pre-allocation.
+   * @throws IOException
+   */
+  public void addPreallocateBlocks(KsmKeyLocationInfoGroup version,
+      long openVersion) throws IOException {
+    // The server may return any number of blocks (zero or more). Only add
+    // the blocks allocated in this open session, i.e. those whose
+    // createVersion equals the open session version.
+    for (KsmKeyLocationInfo subKeyInfo : version.getLocationList()) {
+      if (subKeyInfo.getCreateVersion() == openVersion) {
+        checkKeyLocationInfo(subKeyInfo);
+      }
+    }
+  }
+
+  private void checkKeyLocationInfo(KsmKeyLocationInfo subKeyInfo)
+      throws IOException {
+    String containerKey = subKeyInfo.getBlockID();
+    String containerName = subKeyInfo.getContainerName();
+    Pipeline pipeline = scmClient.getContainer(containerName);
+    XceiverClientSpi xceiverClient =
+        xceiverClientManager.acquireClient(pipeline);
+    // create container if needed
+    if (subKeyInfo.getShouldCreateContainer()) {
+      try {
+        ContainerProtocolCalls.createContainer(xceiverClient, requestID);
+        scmClient.notifyObjectStageChange(
+            ObjectStageChangeRequestProto.Type.container,
+            containerName, ObjectStageChangeRequestProto.Op.create,
+            ObjectStageChangeRequestProto.Stage.complete);
+      } catch (StorageContainerException ex) {
+        if (ex.getResult().equals(Result.CONTAINER_EXISTS)) {
+          // container already exists; this should never happen
+          LOG.debug("Container {} already exists.", containerName);
+        } else {
+          LOG.error("Container creation failed for {}.", containerName, ex);
+          throw ex;
+        }
+      }
+    }
+    streamEntries.add(new ChunkOutputStreamEntry(containerKey,
+        keyArgs.getKeyName(), xceiverClientManager, xceiverClient, requestID,
+        chunkSize, subKeyInfo.getLength()));
+  }
+
+
+  @VisibleForTesting
+  public long getByteOffset() {
+    return byteOffset;
+  }
+
+
+  @Override
+  public synchronized void write(int b) throws IOException {
+    checkNotClosed();
+
+    if (streamEntries.size() <= currentStreamIndex) {
+      Preconditions.checkNotNull(ksmClient);
+      // Allocate a new block. If an exception happens, log an error and
+      // rethrow it to the caller directly; the write fails.
+      try {
+        allocateNewBlock(currentStreamIndex);
+      } catch (IOException ioe) {
+        LOG.error("Allocate block fail when writing.");
+        throw ioe;
+      }
+    }
+    ChunkOutputStreamEntry entry = streamEntries.get(currentStreamIndex);
+    entry.write(b);
+    if (entry.getRemaining() <= 0) {
+      currentStreamIndex += 1;
+    }
+    byteOffset += 1;
+  }
+
+  /**
+   * Tries to write the byte sequence b[off:off+len) to the streams.
+   *
+   * NOTE: an exception is thrown if the data cannot fit into the remaining
+   * space, in which case nothing is written.
+   * TODO: may need to revisit this behaviour.
+   *
+   * @param b byte data
+   * @param off starting offset
+   * @param len length to write
+   * @throws IOException
+   */
+  @Override
+  public synchronized void write(byte[] b, int off, int len)
+      throws IOException {
+    checkNotClosed();
+
+    if (b == null) {
+      throw new NullPointerException();
+    }
+    if ((off < 0) || (off > b.length) || (len < 0) ||
+        ((off + len) > b.length) || ((off + len) < 0)) {
+      throw new IndexOutOfBoundsException();
+    }
+    if (len == 0) {
+      return;
+    }
+    int succeededAllocates = 0;
+    while (len > 0) {
+      if (streamEntries.size() <= currentStreamIndex) {
+        Preconditions.checkNotNull(ksmClient);
+        // Allocate a new block. If an exception happens, log an error and
+        // rethrow it to the caller directly; the write fails.
+        try {
+          allocateNewBlock(currentStreamIndex);
+          succeededAllocates += 1;
+        } catch (IOException ioe) {
+          LOG.error("Try to allocate more blocks for write failed, already " +
+              "allocated " + succeededAllocates + " blocks for this write.");
+          throw ioe;
+        }
+      }
+      // In theory this condition can never be violated due to the check
+      // above, but still do a sanity check.
+      Preconditions.checkArgument(currentStreamIndex < streamEntries.size());
+      ChunkOutputStreamEntry current = streamEntries.get(currentStreamIndex);
+      int writeLen = Math.min(len, (int)current.getRemaining());
+      current.write(b, off, writeLen);
+      if (current.getRemaining() <= 0) {
+        currentStreamIndex += 1;
+      }
+      len -= writeLen;
+      off += writeLen;
+      byteOffset += writeLen;
+    }
+  }
+
+  /**
+   * Contacts KSM to get a new block and associates it with the given index
+   * (e.g. the first block has index = 0, the second has index = 1, etc.).
+   *
+   * The returned block is wrapped in a new ChunkOutputStreamEntry for writing.
+   *
+   * @param index the index of the block.
+   * @throws IOException
+   */
+  private void allocateNewBlock(int index) throws IOException {
+    KsmKeyLocationInfo subKeyInfo = ksmClient.allocateBlock(keyArgs, openID);
+    checkKeyLocationInfo(subKeyInfo);
+  }
+
+  @Override
+  public synchronized void flush() throws IOException {
+    checkNotClosed();
+    if (streamEntries.size() == 0) {
+      return;
+    }
+    for (int i = 0; i <= currentStreamIndex; i++) {
+      streamEntries.get(i).flush();
+    }
+  }
+
+  /**
+   * Commits the key to KSM; this adds the blocks as the new key blocks.
+   *
+   * @throws IOException
+   */
+  @Override
+  public synchronized void close() throws IOException {
+    if (closed) {
+      return;
+    }
+    closed = true;
+    for (ChunkOutputStreamEntry entry : streamEntries) {
+      if (entry != null) {
+        entry.close();
+      }
+    }
+    if (keyArgs != null) {
+      // in tests, this could be null
+      keyArgs.setDataSize(byteOffset);
+      ksmClient.commitKey(keyArgs, openID);
+    } else {
+      LOG.warn("Closing ChunkGroupOutputStream, but key args is null");
+    }
+  }
+
+  /**
+   * Builder class of ChunkGroupOutputStream.
+   */
+  public static class Builder {
+    private OpenKeySession openHandler;
+    private XceiverClientManager xceiverManager;
+    private StorageContainerLocationProtocolClientSideTranslatorPB scmClient;
+    private KeySpaceManagerProtocolClientSideTranslatorPB ksmClient;
+    private int chunkSize;
+    private String requestID;
+    private ReplicationType type;
+    private ReplicationFactor factor;
+
+    public Builder setHandler(OpenKeySession handler) {
+      this.openHandler = handler;
+      return this;
+    }
+
+    public Builder setXceiverClientManager(XceiverClientManager manager) {
+      this.xceiverManager = manager;
+      return this;
+    }
+
+    public Builder setScmClient(
+        StorageContainerLocationProtocolClientSideTranslatorPB client) {
+      this.scmClient = client;
+      return this;
+    }
+
+    public Builder setKsmClient(
+        KeySpaceManagerProtocolClientSideTranslatorPB client) {
+      this.ksmClient = client;
+      return this;
+    }
+
+    public Builder setChunkSize(int size) {
+      this.chunkSize = size;
+      return this;
+    }
+
+    public Builder setRequestID(String id) {
+      this.requestID = id;
+      return this;
+    }
+
+    public Builder setType(ReplicationType replicationType) {
+      this.type = replicationType;
+      return this;
+    }
+
+    public Builder setFactor(ReplicationFactor replicationFactor) {
+      this.factor = replicationFactor;
+      return this;
+    }
+
+    public ChunkGroupOutputStream build() throws IOException {
+      return new ChunkGroupOutputStream(openHandler, xceiverManager, scmClient,
+          ksmClient, chunkSize, requestID, factor, type);
+    }
+  }
+
+  private static class ChunkOutputStreamEntry extends OutputStream {
+    private OutputStream outputStream;
+    private final String containerKey;
+    private final String key;
+    private final XceiverClientManager xceiverClientManager;
+    private final XceiverClientSpi xceiverClient;
+    private final String requestId;
+    private final int chunkSize;
+    // total number of bytes that should be written to this stream
+    private final long length;
+    // the current position of this stream 0 <= currentPosition < length
+    private long currentPosition;
+
+    ChunkOutputStreamEntry(String containerKey, String key,
+        XceiverClientManager xceiverClientManager,
+        XceiverClientSpi xceiverClient, String requestId, int chunkSize,
+        long length) {
+      this.outputStream = null;
+      this.containerKey = containerKey;
+      this.key = key;
+      this.xceiverClientManager = xceiverClientManager;
+      this.xceiverClient = xceiverClient;
+      this.requestId = requestId;
+      this.chunkSize = chunkSize;
+
+      this.length = length;
+      this.currentPosition = 0;
+    }
+
+    /**
+     * For testing purposes, takes an externally created stream instance.
+     * @param  outputStream an existing writable output stream
+     * @param  length the length of data to write to the stream
+     */
+    ChunkOutputStreamEntry(OutputStream outputStream, long length) {
+      this.outputStream = outputStream;
+      this.containerKey = null;
+      this.key = null;
+      this.xceiverClientManager = null;
+      this.xceiverClient = null;
+      this.requestId = null;
+      this.chunkSize = -1;
+
+      this.length = length;
+      this.currentPosition = 0;
+    }
+
+    long getLength() {
+      return length;
+    }
+
+    long getRemaining() {
+      return length - currentPosition;
+    }
+
+    private synchronized void checkStream() {
+      if (this.outputStream == null) {
+        this.outputStream = new ChunkOutputStream(containerKey,
+            key, xceiverClientManager, xceiverClient,
+            requestId, chunkSize);
+      }
+    }
+
+    @Override
+    public void write(int b) throws IOException {
+      checkStream();
+      outputStream.write(b);
+      this.currentPosition += 1;
+    }
+
+    @Override
+    public void write(byte[] b, int off, int len) throws IOException {
+      checkStream();
+      outputStream.write(b, off, len);
+      this.currentPosition += len;
+    }
+
+    @Override
+    public void flush() throws IOException {
+      if (this.outputStream != null) {
+        this.outputStream.flush();
+      }
+    }
+
+    @Override
+    public void close() throws IOException {
+      if (this.outputStream != null) {
+        this.outputStream.close();
+      }
+    }
+  }
+
+  /**
+   * Verify that the output stream is open. Non blocking; this gives
+   * the last state of the volatile {@link #closed} field.
+   * @throws IOException if the connection is closed.
+   */
+  private void checkNotClosed() throws IOException {
+    if (closed) {
+      throw new IOException(
+          ": " + FSExceptionMessages.STREAM_IS_CLOSED + " Key: " + keyArgs
+              .getKeyName());
+    }
+  }
+}
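
To make the intended wiring of the Builder above concrete, here is a minimal usage sketch. It assumes the caller already holds an OpenKeySession plus the XceiverClientManager and SCM/KSM client translators (much as an RPC client would); the helper name openForWrite and the accessors getLatestVersionLocations()/getOpenVersion() are illustrative assumptions, not part of this patch.

    import java.io.IOException;
    import java.util.UUID;

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
    import org.apache.hadoop.hdds.scm.XceiverClientManager;
    import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
    import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
    import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
    import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
    import org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolClientSideTranslatorPB;

    final class ChunkGroupOutputStreamExample {

      // Sketch only: builds a ChunkGroupOutputStream for an already opened key.
      static OzoneOutputStream openForWrite(OpenKeySession session,
          XceiverClientManager manager,
          StorageContainerLocationProtocolClientSideTranslatorPB scmClient,
          KeySpaceManagerProtocolClientSideTranslatorPB ksmClient,
          int chunkSize) throws IOException {
        ChunkGroupOutputStream groupStream = new ChunkGroupOutputStream.Builder()
            .setHandler(session)
            .setXceiverClientManager(manager)
            .setScmClient(scmClient)
            .setKsmClient(ksmClient)
            .setChunkSize(chunkSize)
            .setRequestID(UUID.randomUUID().toString())
            .setType(ReplicationType.RATIS)
            .setFactor(ReplicationFactor.THREE)
            .build();
        // Reuse blocks pre-allocated for this open session, if any, before
        // writing. getLatestVersionLocations()/getOpenVersion() are assumed
        // accessors on the KSM helper classes.
        groupStream.addPreallocateBlocks(
            session.getKeyInfo().getLatestVersionLocations(),
            session.getOpenVersion());
        return new OzoneOutputStream(groupStream);
      }

      private ChunkGroupOutputStreamExample() { }
    }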
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneContainerTranslation.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneContainerTranslation.java
new file mode 100644
index 0000000..bf9e80f
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneContainerTranslation.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.io;
+
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+
+/**
+ * This class contains methods that define the translation between the Ozone
+ * domain model and the storage container domain model.
+ */
+final class OzoneContainerTranslation {
+
+  /**
+   * Creates key data intended for reading a container key.
+   *
+   * @param containerName container name
+   * @param containerKey container key
+   * @return KeyData intended for reading the container key
+   */
+  public static KeyData containerKeyDataForRead(String containerName,
+      String containerKey) {
+    return KeyData
+        .newBuilder()
+        .setContainerName(containerName)
+        .setName(containerKey)
+        .build();
+  }
+
+  /**
+   * There is no need to instantiate this class.
+   */
+  private OzoneContainerTranslation() {
+  }
+}
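
A brief sketch of how the translation helper above might be consumed on the read path. The container and key names are placeholders; because OzoneContainerTranslation is package-private, such a caller would have to live in the same package.

    package org.apache.hadoop.ozone.client.io;

    import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;

    final class ContainerTranslationExample {

      // Sketch: build the KeyData used to read a key back from a container.
      // "myContainer" and "volume/bucket/key" are placeholder identifiers; the
      // result would typically be handed to the container read call together
      // with an XceiverClient.
      static KeyData readRequest() {
        return OzoneContainerTranslation.containerKeyDataForRead(
            "myContainer", "volume/bucket/key");
      }

      private ContainerTranslationExample() { }
    }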
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneInputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneInputStream.java
new file mode 100644
index 0000000..c2ff979
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneInputStream.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.client.io;
+
+import org.apache.hadoop.hdds.scm.storage.ChunkInputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * OzoneInputStream is used to read data from Ozone.
+ * It uses SCM's {@link ChunkInputStream} for reading the data.
+ */
+public class OzoneInputStream extends InputStream {
+
+  private final InputStream inputStream;
+
+  /**
+   * Constructs OzoneInputStream with ChunkInputStream.
+   *
+   * @param inputStream the input stream to wrap
+   */
+  public OzoneInputStream(InputStream inputStream) {
+    this.inputStream = inputStream;
+  }
+
+  @Override
+  public int read() throws IOException {
+    return inputStream.read();
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    inputStream.close();
+  }
+
+  @Override
+  public int available() throws IOException {
+    return inputStream.available();
+  }
+
+  public InputStream getInputStream() {
+    return inputStream;
+  }
+}
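
Since OzoneInputStream is a plain InputStream wrapper, reading a key is an ordinary read loop. A minimal, hedged sketch; the ByteArrayInputStream here is only a stand-in for the chunk-backed stream the real read path would supply.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.ozone.client.io.OzoneInputStream;

    final class OzoneInputStreamExample {

      // Sketch: drain an OzoneInputStream into a byte array. The inherited
      // InputStream#read(byte[], int, int) is used, since OzoneInputStream
      // only overrides the single-byte read.
      static byte[] readFully(OzoneInputStream in) throws IOException {
        try (OzoneInputStream source = in;
             ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
          byte[] buffer = new byte[4096];
          int n;
          while ((n = source.read(buffer)) != -1) {
            bos.write(buffer, 0, n);
          }
          return bos.toByteArray();
        }
      }

      public static void main(String[] args) throws IOException {
        byte[] data = readFully(new OzoneInputStream(new ByteArrayInputStream(
            "hello ozone".getBytes(StandardCharsets.UTF_8))));
        System.out.println(new String(data, StandardCharsets.UTF_8));
      }

      private OzoneInputStreamExample() { }
    }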
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneOutputStream.java
new file mode 100644
index 0000000..5369220
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/OzoneOutputStream.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.client.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * OzoneOutputStream is used to write data into Ozone.
+ * It uses {@link ChunkGroupOutputStream} for writing the data.
+ */
+public class OzoneOutputStream extends OutputStream {
+
+  private final OutputStream outputStream;
+
+  /**
+   * Constructs OzoneOutputStream with ChunkGroupOutputStream.
+   *
+   * @param outputStream the output stream to wrap
+   */
+  public OzoneOutputStream(OutputStream outputStream) {
+    this.outputStream = outputStream;
+  }
+
+  @Override
+  public void write(int b) throws IOException {
+    outputStream.write(b);
+  }
+
+  @Override
+  public void write(byte[] b, int off, int len) throws IOException {
+    outputStream.write(b, off, len);
+  }
+
+  @Override
+  public synchronized void flush() throws IOException {
+    outputStream.flush();
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    //commitKey can be done here, if needed.
+    outputStream.close();
+  }
+
+  public OutputStream getOutputStream() {
+    return outputStream;
+  }
+}
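
Closing an OzoneOutputStream closes the wrapped stream, which for a ChunkGroupOutputStream triggers the commit to KSM described above. A small, hedged sketch; the writeAndCommit helper is illustrative, and the stream would normally come from the key-create path or the builder sketch shown earlier.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.ozone.client.io.OzoneOutputStream;

    final class OzoneOutputStreamExample {

      // Sketch: write a small payload; close() flushes and, via the wrapped
      // ChunkGroupOutputStream, commits the key to KSM.
      static void writeAndCommit(OzoneOutputStream out) throws IOException {
        try (OzoneOutputStream stream = out) {
          stream.write("hello ozone".getBytes(StandardCharsets.UTF_8));
        }
      }

      private OzoneOutputStreamExample() { }
    }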
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java
new file mode 100644
index 0000000..493ece8
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.io;
+
+/**
+ * This package contains Ozone I/O classes.
+ */
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/package-info.java
new file mode 100644
index 0000000..7e2591a
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+/**
+ * This package contains Ozone Client classes.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
new file mode 100644
index 0000000..816c185
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java
@@ -0,0 +1,323 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.protocol;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.client.BucketArgs;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.VolumeArgs;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * An implementer of this interface is capable of connecting to an Ozone
+ * cluster and performing client operations. The protocol used for
+ * communication is determined by the implementation class specified by the
+ * property <code>ozone.client.protocol</code>. The built-in implementations
+ * include: {@link org.apache.hadoop.ozone.client.rpc.RpcClient} for RPC and
+ * {@link org.apache.hadoop.ozone.client.rest.RestClient} for REST.
+ */
+public interface ClientProtocol {
+
+  /**
+   * Creates a new Volume.
+   * @param volumeName Name of the Volume
+   * @throws IOException
+   */
+  void createVolume(String volumeName)
+      throws IOException;
+
+  /**
+   * Creates a new Volume with properties set in VolumeArgs.
+   * @param volumeName Name of the Volume
+   * @param args Properties to be set for the Volume
+   * @throws IOException
+   */
+  void createVolume(String volumeName, VolumeArgs args)
+      throws IOException;
+
+  /**
+   * Sets the owner of volume.
+   * @param volumeName Name of the Volume
+   * @param owner to be set for the Volume
+   * @throws IOException
+   */
+  void setVolumeOwner(String volumeName, String owner) throws IOException;
+
+  /**
+   * Set Volume Quota.
+   * @param volumeName Name of the Volume
+   * @param quota Quota to be set for the Volume
+   * @throws IOException
+   */
+  void setVolumeQuota(String volumeName, OzoneQuota quota)
+      throws IOException;
+
+  /**
+   * Returns {@link OzoneVolume}.
+   * @param volumeName Name of the Volume
+   * @return {@link OzoneVolume}
+   * @throws IOException
+   * */
+  OzoneVolume getVolumeDetails(String volumeName)
+      throws IOException;
+
+  /**
+   * Checks if a Volume exists and the user with a role specified has access
+   * to the Volume.
+   * @param volumeName Name of the Volume
+   * @param acl requested acls which needs to be checked for access
+   * @return Boolean - True if the user with a role can access the volume.
+   * This is possible for owners of the volume and admin users
+   * @throws IOException
+   */
+  boolean checkVolumeAccess(String volumeName, OzoneAcl acl)
+      throws IOException;
+
+  /**
+   * Deletes an empty Volume.
+   * @param volumeName Name of the Volume
+   * @throws IOException
+   */
+  void deleteVolume(String volumeName) throws IOException;
+
+  /**
+   * Lists all volumes in the cluster that match the volumePrefix; the
+   * size of the returned list depends on maxListResult. If the volume prefix
+   * is null, all volumes are returned. The caller has to make multiple calls
+   * to read all volumes.
+   *
+   * @param volumePrefix Volume prefix to match
+   * @param prevVolume Starting point of the list, this volume is excluded
+   * @param maxListResult Max number of volumes to return.
+   * @return {@code List<OzoneVolume>}
+   * @throws IOException
+   */
+  List<OzoneVolume> listVolumes(String volumePrefix, String prevVolume,
+                                int maxListResult)
+      throws IOException;
+
+  /**
+   * Lists all volumes in the cluster that are owned by the specified
+   * user and match the volumePrefix; the size of the returned list depends on
+   * maxListResult. If the user is null, volumes owned by the current user are
+   * returned. If the volume prefix is null, all volumes are returned. The
+   * caller has to make multiple calls to read all volumes.
+   *
+   * @param user User Name
+   * @param volumePrefix Volume prefix to match
+   * @param prevVolume Starting point of the list, this volume is excluded
+   * @param maxListResult Max number of volumes to return.
+   * @return {@code List<OzoneVolume>}
+   * @throws IOException
+   */
+  List<OzoneVolume> listVolumes(String user, String volumePrefix,
+                                    String prevVolume, int maxListResult)
+      throws IOException;
+
+  /**
+   * Creates a new Bucket in the Volume.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @throws IOException
+   */
+  void createBucket(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Creates a new Bucket in the Volume, with properties set in BucketArgs.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param bucketArgs Bucket Arguments
+   * @throws IOException
+   */
+  void createBucket(String volumeName, String bucketName,
+                    BucketArgs bucketArgs)
+      throws IOException;
+
+  /**
+   * Adds ACLs to the Bucket.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param addAcls ACLs to be added
+   * @throws IOException
+   */
+  void addBucketAcls(String volumeName, String bucketName,
+                     List<OzoneAcl> addAcls)
+      throws IOException;
+
+  /**
+   * Removes ACLs from a Bucket.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param removeAcls ACLs to be removed
+   * @throws IOException
+   */
+  void removeBucketAcls(String volumeName, String bucketName,
+                        List<OzoneAcl> removeAcls)
+      throws IOException;
+
+
+  /**
+   * Enables or disables Bucket Versioning.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param versioning True to enable Versioning, False to disable.
+   * @throws IOException
+   */
+  void setBucketVersioning(String volumeName, String bucketName,
+                           Boolean versioning)
+      throws IOException;
+
+  /**
+   * Sets the Storage Type of a Bucket.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param storageType StorageType to be set
+   * @throws IOException
+   */
+  void setBucketStorageType(String volumeName, String bucketName,
+                            StorageType storageType)
+      throws IOException;
+
+  /**
+   * Deletes a bucket if it is empty.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @throws IOException
+   */
+  void deleteBucket(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Verifies that the bucket exists and the user has read access
+   * to the bucket; otherwise an exception is thrown.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @throws IOException
+   */
+  void checkBucketAccess(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Returns {@link OzoneBucket}.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @return {@link OzoneBucket}
+   * @throws IOException
+   */
+  OzoneBucket getBucketDetails(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Returns the list of Buckets in the Volume that match the bucketPrefix;
+   * the size of the returned list depends on maxListResult. The caller has to
+   * make multiple calls to read all buckets.
+   * @param volumeName Name of the Volume
+   * @param bucketPrefix Bucket prefix to match
+   * @param prevBucket Starting point of the list, this bucket is excluded
+   * @param maxListResult Max number of buckets to return.
+   * @return {@code List<OzoneBucket>}
+   * @throws IOException
+   */
+  List<OzoneBucket> listBuckets(String volumeName, String bucketPrefix,
+                                String prevBucket, int maxListResult)
+      throws IOException;
+
+  /**
+   * Writes a key in an existing bucket.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyName Name of the Key
+   * @param size Size of the data
+   * @param type Replication Type to be used for the Key
+   * @param factor Replication Factor to be used for the Key
+   * @return {@link OzoneOutputStream}
+   * @throws IOException
+   */
+  OzoneOutputStream createKey(String volumeName, String bucketName,
+                              String keyName, long size, ReplicationType type,
+                              ReplicationFactor factor)
+      throws IOException;
+
+  /**
+   * Reads a key from an existing bucket.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyName Name of the Key
+   * @return {@link OzoneInputStream}
+   * @throws IOException
+   */
+  OzoneInputStream getKey(String volumeName, String bucketName, String keyName)
+      throws IOException;
+
+
+  /**
+   * Deletes an existing key.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyName Name of the Key
+   * @throws IOException
+   */
+  void deleteKey(String volumeName, String bucketName, String keyName)
+      throws IOException;
+
+
+  /**
+   * Returns the list of Keys in {Volume/Bucket} that match the keyPrefix;
+   * the size of the returned list depends on maxListResult. The caller has
+   * to make multiple calls to read all keys.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyPrefix Key prefix to match
+   * @param prevKey Starting point of the list, this key is excluded
+   * @param maxListResult Max number of keys to return.
+   * @return {@code List<OzoneKey>}
+   * @throws IOException
+   */
+  List<OzoneKey> listKeys(String volumeName, String bucketName,
+                          String keyPrefix, String prevKey, int maxListResult)
+      throws IOException;
+
+
+  /**
+   * Get OzoneKey.
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyName Key name
+   * @return {@link OzoneKey}
+   * @throws IOException
+   */
+  OzoneKey getKeyDetails(String volumeName, String bucketName,
+                         String keyName)
+      throws IOException;
+
+  /**
+   * Close and release the resources.
+   */
+  void close() throws IOException;
+
+}
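
To illustrate how the operations above compose, here is a hedged end-to-end sketch against the ClientProtocol interface. The concrete implementation (RpcClient or RestClient) and all volume/bucket/key names are placeholders, and error handling is omitted.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.hdds.client.ReplicationFactor;
    import org.apache.hadoop.hdds.client.ReplicationType;
    import org.apache.hadoop.ozone.client.io.OzoneInputStream;
    import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
    import org.apache.hadoop.ozone.client.protocol.ClientProtocol;

    final class ClientProtocolExample {

      // Sketch: create a volume and bucket, write one key, then read it back.
      static void roundTrip(ClientProtocol client) throws IOException {
        byte[] payload = "hello ozone".getBytes(StandardCharsets.UTF_8);

        client.createVolume("vol1");
        client.createBucket("vol1", "bucket1");

        try (OzoneOutputStream out = client.createKey("vol1", "bucket1", "key1",
            payload.length, ReplicationType.RATIS, ReplicationFactor.THREE)) {
          out.write(payload);
        }

        byte[] read = new byte[payload.length];
        try (OzoneInputStream in = client.getKey("vol1", "bucket1", "key1")) {
          int off = 0;
          while (off < read.length) {
            int n = in.read(read, off, read.length - off);
            if (n == -1) {
              break;
            }
            off += n;
          }
        }
        client.close();
      }

      private ClientProtocolExample() { }
    }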
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/package-info.java
new file mode 100644
index 0000000..f4890a1
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.protocol;
+
+/**
+ * This package contains Ozone client protocol library classes.
+ */
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/DefaultRestServerSelector.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/DefaultRestServerSelector.java
new file mode 100644
index 0000000..93b3417
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/DefaultRestServerSelector.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+
+import java.util.List;
+import java.util.Random;
+
+/**
+ * The default selector randomly picks one of the REST Servers from the list.
+ */
+public class DefaultRestServerSelector implements RestServerSelector {
+
+  @Override
+  public ServiceInfo getRestServer(List<ServiceInfo> restServices) {
+    return restServices.get(
+        new Random().nextInt(restServices.size()));
+  }
+}
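
The selector is pluggable via the RestServerSelector interface. As a hedged illustration (not part of the patch), a deterministic variant that always returns the first REST endpoint could look like the following; it can be handy in tests and assumes the interface is public (or that the class lives in the same package).

    import java.util.List;

    import org.apache.hadoop.ozone.client.rest.RestServerSelector;
    import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;

    /**
     * Illustrative only: always picks the first REST server in the list.
     */
    public class FirstRestServerSelector implements RestServerSelector {

      @Override
      public ServiceInfo getRestServer(List<ServiceInfo> restServices) {
        return restServices.get(0);
      }
    }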
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneExceptionMapper.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneExceptionMapper.java
new file mode 100644
index 0000000..6c479f7
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneExceptionMapper.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+
+import org.slf4j.MDC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *  Class that maps the various errors returned by the
+ *  Object Layer to HTTP responses.
+ */
+public class OzoneExceptionMapper implements ExceptionMapper<OzoneException> {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OzoneExceptionMapper.class);
+
+  @Override
+  public Response toResponse(OzoneException exception) {
+    LOG.debug("Returning exception. ex: {}", exception.toJsonString());
+    MDC.clear();
+    return Response.status((int)exception.getHttpCode())
+      .entity(exception.toJsonString()).build();
+  }
+
+}
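
For context, an ExceptionMapper like the one above only takes effect once it is registered with a JAX-RS application. A minimal, hedged sketch using the standard javax.ws.rs.core.Application API; the class name ExampleRestApplication is illustrative and not how this patch wires the mapper.

    import java.util.HashSet;
    import java.util.Set;

    import javax.ws.rs.core.Application;

    import org.apache.hadoop.ozone.client.rest.OzoneExceptionMapper;

    /**
     * Illustrative only: registers OzoneExceptionMapper with a JAX-RS app.
     */
    public class ExampleRestApplication extends Application {

      @Override
      public Set<Object> getSingletons() {
        Set<Object> singletons = new HashSet<>();
        singletons.add(new OzoneExceptionMapper());
        return singletons;
      }
    }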
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestClient.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestClient.java
new file mode 100644
index 0000000..b8b4610
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestClient.java
@@ -0,0 +1,799 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.BucketArgs;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.VolumeArgs;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.client.rest.response.BucketInfo;
+import org.apache.hadoop.ozone.client.rest.response.KeyInfo;
+import org.apache.hadoop.ozone.client.rest.response.VolumeInfo;
+import org.apache.hadoop.ozone.client.rpc.RpcClient;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServicePort;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Time;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHeaders;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.entity.InputStreamEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.text.ParseException;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+
+/**
+ * Ozone Client REST protocol implementation. It uses the REST protocol to
+ * connect to the Ozone Handler that executes client calls.
+ */
+public class RestClient implements ClientProtocol {
+
+  private static final String PATH_SEPARATOR = "/";
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RestClient.class);
+
+  private final Configuration conf;
+  private final URI ozoneRestUri;
+  private final CloseableHttpClient httpClient;
+  private final UserGroupInformation ugi;
+  private final OzoneAcl.OzoneACLRights userRights;
+
+  /**
+   * Creates RestClient instance with the given configuration.
+   * @param conf Configuration
+   * @throws IOException
+   */
+  public RestClient(Configuration conf)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(conf);
+      this.conf = conf;
+
+      long socketTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+      long connectionTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+      int maxConnection = conf.getInt(
+          OzoneConfigKeys.OZONE_REST_CLIENT_HTTP_CONNECTION_MAX,
+          OzoneConfigKeys.OZONE_REST_CLIENT_HTTP_CONNECTION_DEFAULT);
+
+      int maxConnectionPerRoute = conf.getInt(
+          OzoneConfigKeys.OZONE_REST_CLIENT_HTTP_CONNECTION_PER_ROUTE_MAX,
+          OzoneConfigKeys
+              .OZONE_REST_CLIENT_HTTP_CONNECTION_PER_ROUTE_MAX_DEFAULT
+      );
+
+      /*
+      To make RestClient thread-safe, create the HttpClient with a
+      PoolingHttpClientConnectionManager.
+      */
+      PoolingHttpClientConnectionManager connManager =
+          new PoolingHttpClientConnectionManager();
+      connManager.setMaxTotal(maxConnection);
+      connManager.setDefaultMaxPerRoute(maxConnectionPerRoute);
+
+      this.httpClient = HttpClients.custom()
+          .setConnectionManager(connManager)
+          .setDefaultRequestConfig(
+              RequestConfig.custom()
+                  .setSocketTimeout(Math.toIntExact(socketTimeout))
+                  .setConnectTimeout(Math.toIntExact(connectionTimeout))
+                  .build())
+          .build();
+      this.ugi = UserGroupInformation.getCurrentUser();
+      this.userRights = conf.getEnum(KSMConfigKeys.OZONE_KSM_USER_RIGHTS,
+          KSMConfigKeys.OZONE_KSM_USER_RIGHTS_DEFAULT);
+
+      // TODO: Add new configuration parameter to configure RestServerSelector.
+      RestServerSelector defaultSelector = new DefaultRestServerSelector();
+      InetSocketAddress restServer = getOzoneRestServerAddress(defaultSelector);
+      URIBuilder uriBuilder = new URIBuilder()
+          .setScheme("http")
+          .setHost(restServer.getHostName())
+          .setPort(restServer.getPort());
+      this.ozoneRestUri = uriBuilder.build();
+
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  private InetSocketAddress getOzoneRestServerAddress(
+      RestServerSelector selector) throws IOException {
+    String httpAddress = conf.get(KSMConfigKeys.OZONE_KSM_HTTP_ADDRESS_KEY);
+
+    if (httpAddress == null) {
+      throw new IllegalArgumentException(
+          KSMConfigKeys.OZONE_KSM_HTTP_ADDRESS_KEY + " must be defined. See" +
+              " https://wiki.apache.org/hadoop/Ozone#Configuration for" +
+              " details on configuring Ozone.");
+    }
+
+    HttpGet httpGet = new HttpGet("http://" + httpAddress + "/serviceList");
+    HttpEntity entity = executeHttpRequest(httpGet);
+    try {
+      String serviceListJson = EntityUtils.toString(entity);
+
+      ObjectMapper objectMapper = new ObjectMapper();
+      TypeReference<List<ServiceInfo>> serviceInfoReference =
+          new TypeReference<List<ServiceInfo>>() {
+          };
+      List<ServiceInfo> services = objectMapper.readValue(
+          serviceListJson, serviceInfoReference);
+
+      List<ServiceInfo> dataNodeInfos = services.stream().filter(
+          a -> a.getNodeType().equals(HddsProtos.NodeType.DATANODE))
+          .collect(Collectors.toList());
+
+      ServiceInfo restServer = selector.getRestServer(dataNodeInfos);
+
+      return NetUtils.createSocketAddr(restServer.getHostname() + ":" +
+          restServer.getPort(ServicePort.Type.HTTP));
+    } finally {
+      EntityUtils.consume(entity);
+    }
+  }
+
+  @Override
+  public void createVolume(String volumeName) throws IOException {
+    createVolume(volumeName, VolumeArgs.newBuilder().build());
+  }
+
+  @Override
+  public void createVolume(String volumeName, VolumeArgs volArgs)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      String owner = volArgs.getOwner() == null ?
+          ugi.getUserName() : volArgs.getOwner();
+      //TODO: support for ACLs has to be done in OzoneHandler (rest server)
+      /**
+      List<OzoneAcl> listOfAcls = new ArrayList<>();
+      //User ACL
+      listOfAcls.add(new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+          owner, userRights));
+      //ACLs from VolumeArgs
+      if(volArgs.getAcls() != null) {
+        listOfAcls.addAll(volArgs.getAcls());
+      }
+       */
+      builder.setPath(PATH_SEPARATOR + volumeName);
+
+      String quota = volArgs.getQuota();
+      if(quota != null) {
+        builder.setParameter(Header.OZONE_QUOTA_QUERY_TAG, quota);
+      }
+
+      HttpPost httpPost = new HttpPost(builder.build());
+      addOzoneHeaders(httpPost);
+      //use admin from VolumeArgs, if it's present
+      if(volArgs.getAdmin() != null) {
+        httpPost.removeHeaders(HttpHeaders.AUTHORIZATION);
+        httpPost.addHeader(HttpHeaders.AUTHORIZATION,
+            Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+                volArgs.getAdmin());
+      }
+      httpPost.addHeader(Header.OZONE_USER, owner);
+      LOG.info("Creating Volume: {}, with {} as owner and quota set to {}.",
+          volumeName, owner, quota == null ? "default" : quota);
+      EntityUtils.consume(executeHttpRequest(httpPost));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+
+  @Override
+  public void setVolumeOwner(String volumeName, String owner)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(owner);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+      httpPut.addHeader(Header.OZONE_USER, owner);
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void setVolumeQuota(String volumeName, OzoneQuota quota)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(quota);
+      String quotaString = quota.toString();
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName);
+      builder.setParameter(Header.OZONE_QUOTA_QUERY_TAG, quotaString);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public OzoneVolume getVolumeDetails(String volumeName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName);
+      builder.setParameter(Header.OZONE_INFO_QUERY_TAG,
+          Header.OZONE_INFO_QUERY_VOLUME);
+      HttpGet httpGet = new HttpGet(builder.build());
+      addOzoneHeaders(httpGet);
+      HttpEntity response = executeHttpRequest(httpGet);
+      VolumeInfo volInfo =
+          VolumeInfo.parse(EntityUtils.toString(response));
+      //TODO: OzoneHandler in datanode has to be modified to send ACLs
+      OzoneVolume volume = new OzoneVolume(conf,
+          this,
+          volInfo.getVolumeName(),
+          volInfo.getCreatedBy(),
+          volInfo.getOwner().getName(),
+          volInfo.getQuota().sizeInBytes(),
+          HddsClientUtils.formatDateTime(volInfo.getCreatedOn()),
+          null);
+      EntityUtils.consume(response);
+      return volume;
+    } catch (URISyntaxException | ParseException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public boolean checkVolumeAccess(String volumeName, OzoneAcl acl)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public void deleteVolume(String volumeName) throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName);
+      HttpDelete httpDelete = new HttpDelete(builder.build());
+      addOzoneHeaders(httpDelete);
+      EntityUtils.consume(executeHttpRequest(httpDelete));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public List<OzoneVolume> listVolumes(String volumePrefix, String prevKey,
+                                       int maxListResult)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public List<OzoneVolume> listVolumes(String user, String volumePrefix,
+                                       String prevKey, int maxListResult)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public void createBucket(String volumeName, String bucketName)
+      throws IOException {
+    createBucket(volumeName, bucketName, BucketArgs.newBuilder().build());
+  }
+
+  @Override
+  public void createBucket(
+      String volumeName, String bucketName, BucketArgs bucketArgs)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(bucketArgs);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      OzoneConsts.Versioning versioning = OzoneConsts.Versioning.DISABLED;
+      if(bucketArgs.getVersioning() != null &&
+          bucketArgs.getVersioning()) {
+        versioning = OzoneConsts.Versioning.ENABLED;
+      }
+      StorageType storageType = bucketArgs.getStorageType() == null ?
+          StorageType.DEFAULT : bucketArgs.getStorageType();
+
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpPost httpPost = new HttpPost(builder.build());
+      addOzoneHeaders(httpPost);
+
+      //ACLs from BucketArgs
+      if(bucketArgs.getAcls() != null) {
+        for (OzoneAcl acl : bucketArgs.getAcls()) {
+          httpPost.addHeader(
+              Header.OZONE_ACLS, Header.OZONE_ACL_ADD + " " + acl.toString());
+        }
+      }
+      httpPost.addHeader(Header.OZONE_STORAGE_TYPE, storageType.toString());
+      httpPost.addHeader(Header.OZONE_BUCKET_VERSIONING,
+          versioning.toString());
+      LOG.info("Creating Bucket: {}/{}, with Versioning {} and Storage Type" +
+              " set to {}", volumeName, bucketName, versioning,
+          storageType);
+
+      EntityUtils.consume(executeHttpRequest(httpPost));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void addBucketAcls(
+      String volumeName, String bucketName, List<OzoneAcl> addAcls)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(addAcls);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+
+      for (OzoneAcl acl : addAcls) {
+        httpPut.addHeader(
+            Header.OZONE_ACLS, Header.OZONE_ACL_ADD + " " + acl.toString());
+      }
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void removeBucketAcls(
+      String volumeName, String bucketName, List<OzoneAcl> removeAcls)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(removeAcls);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+
+      for (OzoneAcl acl : removeAcls) {
+        httpPut.addHeader(
+            Header.OZONE_ACLS, Header.OZONE_ACL_REMOVE + " " + acl.toString());
+      }
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void setBucketVersioning(
+      String volumeName, String bucketName, Boolean versioning)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(versioning);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+
+      httpPut.addHeader(Header.OZONE_BUCKET_VERSIONING,
+          getBucketVersioning(versioning).toString());
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void setBucketStorageType(
+      String volumeName, String bucketName, StorageType storageType)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(storageType);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpPut httpPut = new HttpPut(builder.build());
+      addOzoneHeaders(httpPut);
+
+      httpPut.addHeader(Header.OZONE_STORAGE_TYPE, storageType.toString());
+      EntityUtils.consume(executeHttpRequest(httpPut));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void deleteBucket(String volumeName, String bucketName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      HttpDelete httpDelete = new HttpDelete(builder.build());
+      addOzoneHeaders(httpDelete);
+      EntityUtils.consume(executeHttpRequest(httpDelete));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void checkBucketAccess(String volumeName, String bucketName)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public OzoneBucket getBucketDetails(String volumeName, String bucketName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName);
+      builder.setParameter(Header.OZONE_INFO_QUERY_TAG,
+          Header.OZONE_INFO_QUERY_BUCKET);
+      HttpGet httpGet = new HttpGet(builder.build());
+      addOzoneHeaders(httpGet);
+      HttpEntity response = executeHttpRequest(httpGet);
+      BucketInfo bucketInfo =
+          BucketInfo.parse(EntityUtils.toString(response));
+      OzoneBucket bucket = new OzoneBucket(conf,
+          this,
+          bucketInfo.getVolumeName(),
+          bucketInfo.getBucketName(),
+          bucketInfo.getAcls(),
+          bucketInfo.getStorageType(),
+          getBucketVersioningFlag(bucketInfo.getVersioning()),
+          HddsClientUtils.formatDateTime(bucketInfo.getCreatedOn()));
+      EntityUtils.consume(response);
+      return bucket;
+    } catch (URISyntaxException | ParseException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public List<OzoneBucket> listBuckets(String volumeName, String bucketPrefix,
+                                       String prevBucket, int maxListResult)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  /**
+   * Writes a key in an existing bucket.
+   *
+   * @param volumeName Name of the Volume
+   * @param bucketName Name of the Bucket
+   * @param keyName Name of the Key
+   * @param size Size of the data
+   * @param type Replication type of the key
+   * @param factor Replication factor of the key
+   * @return {@link OzoneOutputStream}
+   */
+  @Override
+  public OzoneOutputStream createKey(
+      String volumeName, String bucketName, String keyName, long size,
+      ReplicationType type, ReplicationFactor factor)
+      throws IOException {
+    // TODO: Once ReplicationType and ReplicationFactor are supported in
+    // OzoneHandler (in Datanode), set them in header.
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(keyName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName +
+          PATH_SEPARATOR + keyName);
+      HttpPut putRequest = new HttpPut(builder.build());
+      addOzoneHeaders(putRequest);
+      PipedInputStream in = new PipedInputStream();
+      OutputStream out = new PipedOutputStream(in);
+      putRequest.setEntity(new InputStreamEntity(in, size));
+      FutureTask<HttpEntity> futureTask =
+          new FutureTask<>(() -> executeHttpRequest(putRequest));
+      new Thread(futureTask).start();
+      OzoneOutputStream outputStream = new OzoneOutputStream(
+          new OutputStream() {
+            @Override
+            public void write(int b) throws IOException {
+              out.write(b);
+            }
+
+            @Override
+            public void close() throws IOException {
+              try {
+                out.close();
+                EntityUtils.consume(futureTask.get());
+              } catch (ExecutionException | InterruptedException e) {
+                throw new IOException(e);
+              }
+            }
+          });
+
+      return outputStream;
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public OzoneInputStream getKey(
+      String volumeName, String bucketName, String keyName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(keyName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName +
+          PATH_SEPARATOR + keyName);
+      HttpGet getRequest = new HttpGet(builder.build());
+      addOzoneHeaders(getRequest);
+      HttpEntity entity = executeHttpRequest(getRequest);
+      PipedInputStream in = new PipedInputStream();
+      OutputStream out = new PipedOutputStream(in);
+      FutureTask<Void> futureTask =
+          new FutureTask<>(() -> {
+            entity.writeTo(out);
+            out.close();
+            return null;
+          });
+      new Thread(futureTask).start();
+      OzoneInputStream inputStream = new OzoneInputStream(
+          new InputStream() {
+
+            @Override
+            public int read() throws IOException {
+              return in.read();
+            }
+
+            @Override
+            public void close() throws IOException {
+              in.close();
+              EntityUtils.consume(entity);
+            }
+          });
+
+      return inputStream;
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void deleteKey(String volumeName, String bucketName, String keyName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(keyName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName + PATH_SEPARATOR + keyName);
+      HttpDelete httpDelete = new HttpDelete(builder.build());
+      addOzoneHeaders(httpDelete);
+      EntityUtils.consume(executeHttpRequest(httpDelete));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public List<OzoneKey> listKeys(String volumeName, String bucketName,
+                                 String keyPrefix, String prevKey,
+                                 int maxListResult)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public OzoneKey getKeyDetails(
+      String volumeName, String bucketName, String keyName)
+      throws IOException {
+    try {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(keyName);
+      URIBuilder builder = new URIBuilder(ozoneRestUri);
+      builder.setPath(PATH_SEPARATOR + volumeName +
+          PATH_SEPARATOR + bucketName + PATH_SEPARATOR + keyName);
+      builder.setParameter(Header.OZONE_INFO_QUERY_TAG,
+          Header.OZONE_INFO_QUERY_KEY);
+      HttpGet httpGet = new HttpGet(builder.build());
+      addOzoneHeaders(httpGet);
+      HttpEntity response = executeHttpRequest(httpGet);
+      KeyInfo keyInfo =
+          KeyInfo.parse(EntityUtils.toString(response));
+      OzoneKey key = new OzoneKey(volumeName,
+          bucketName,
+          keyInfo.getKeyName(),
+          keyInfo.getSize(),
+          HddsClientUtils.formatDateTime(keyInfo.getCreatedOn()),
+          HddsClientUtils.formatDateTime(keyInfo.getModifiedOn()));
+      EntityUtils.consume(response);
+      return key;
+    } catch (URISyntaxException | ParseException e) {
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Adds Ozone headers to http request.
+   *
+   * @param httpRequest Http Request
+   */
+  private void addOzoneHeaders(HttpUriRequest httpRequest) {
+    httpRequest.addHeader(HttpHeaders.AUTHORIZATION,
+        Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+            ugi.getUserName());
+    httpRequest.addHeader(HttpHeaders.DATE,
+        HddsClientUtils.formatDateTime(Time.monotonicNow()));
+    httpRequest.addHeader(Header.OZONE_VERSION_HEADER,
+        Header.OZONE_V1_VERSION_HEADER);
+  }
+
+  /**
+   * Sends the http request to the server and returns the response HttpEntity.
+   * It is the responsibility of the caller to consume and close the response
+   * HttpEntity by calling {@code EntityUtils.consume}.
+   *
+   * @param httpUriRequest http request
+   * @throws IOException if the request fails or the server reports an error
+   */
+  private HttpEntity executeHttpRequest(HttpUriRequest httpUriRequest)
+      throws IOException {
+    HttpResponse response = httpClient.execute(httpUriRequest);
+    int errorCode = response.getStatusLine().getStatusCode();
+    HttpEntity entity = response.getEntity();
+    if ((errorCode == HTTP_OK) || (errorCode == HTTP_CREATED)) {
+      return entity;
+    }
+    if (entity != null) {
+      throw new IOException(
+          OzoneException.parse(EntityUtils.toString(entity)));
+    } else {
+      throw new IOException("Unexpected null in http payload," +
+          " while processing request");
+    }
+  }
+
+  /**
+   * Converts OzoneConsts.Versioning to boolean.
+   *
+   * @param version Versioning enum value
+   * @return corresponding boolean value
+   */
+  private Boolean getBucketVersioningFlag(
+      OzoneConsts.Versioning version) {
+    if(version != null) {
+      switch(version) {
+      case ENABLED:
+        return true;
+      case NOT_DEFINED:
+      case DISABLED:
+      default:
+        return false;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Converts the bucket versioning flag into OzoneConsts.Versioning.
+   *
+   * @param flag versioning flag
+   * @return corresponding OzoneConsts.Versioning
+   */
+  private OzoneConsts.Versioning getBucketVersioning(Boolean flag) {
+    if(flag != null) {
+      if(flag) {
+        return OzoneConsts.Versioning.ENABLED;
+      } else {
+        return OzoneConsts.Versioning.DISABLED;
+      }
+    }
+    return OzoneConsts.Versioning.NOT_DEFINED;
+  }
+
+  @Override
+  public void close() throws IOException {
+    httpClient.close();
+  }
+}
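The createKey() and getKey() implementations above bridge the HTTP entity to Ozone streams with a PipedInputStream/PipedOutputStream pair plus a FutureTask that runs the request (or drains the response) on a separate thread; close() then waits on that task so failures surface to the caller. A minimal, self-contained sketch of that pattern follows; the class name and the byte-counting consumer are illustrative only and not part of this patch.

import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.FutureTask;

public class PipedTransferSketch {
  public static void main(String[] args) throws Exception {
    PipedInputStream in = new PipedInputStream();
    OutputStream out = new PipedOutputStream(in);
    // Stand-in for executeHttpRequest(putRequest): drains the piped stream
    // on another thread and reports how many bytes it saw.
    FutureTask<Long> consumer = new FutureTask<>(() -> {
      long bytes = 0;
      while (in.read() != -1) {
        bytes++;
      }
      return bytes;
    });
    new Thread(consumer).start();
    out.write("hello ozone".getBytes(StandardCharsets.UTF_8));
    out.close();                 // signals end-of-stream to the consumer thread
    System.out.println("consumed " + consumer.get() + " bytes");
  }
}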
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestServerSelector.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestServerSelector.java
new file mode 100644
index 0000000..54e219b
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/RestServerSelector.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+
+import java.util.List;
+
+/**
+ * The implementor of this interface selects the REST server which will
+ * be used by the client to connect to the Ozone cluster, given a list of
+ * REST servers/DataNodes (DataNodes are the ones which host the REST service).
+ */
+public interface RestServerSelector {
+
+  /**
+   * Returns the REST Service which will be used by the client for connection.
+   *
+   * @param restServices list of available REST servers
+   * @return ServiceInfo
+   */
+  ServiceInfo getRestServer(List<ServiceInfo> restServices);
+
+}
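As a hedged illustration of the contract above, an implementor could simply pick one of the advertised REST servers at random. The class below is a hypothetical example and not part of this patch; it only assumes the ServiceInfo list passed in is non-empty.

package org.apache.hadoop.ozone.client.rest;

import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;

import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Example RestServerSelector that picks a random DataNode hosting the
 * REST service from the supplied list.
 */
public class RandomRestServerSelector implements RestServerSelector {

  @Override
  public ServiceInfo getRestServer(List<ServiceInfo> restServices) {
    // Assumes the list is non-empty; callers pass the servers advertised
    // by the KSM service list.
    return restServices.get(
        ThreadLocalRandom.current().nextInt(restServices.size()));
  }
}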
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/exceptions/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/exceptions/package-info.java
new file mode 100644
index 0000000..233e788
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/exceptions/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.rest.exceptions;
+
+/**
+ * This package contains Ozone REST client exception classes.
+ */
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java
new file mode 100644
index 0000000..340709f
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@InterfaceAudience.Private
+package org.apache.hadoop.ozone.client.rest.headers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java
new file mode 100644
index 0000000..ebcc104
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+/**
+ * This package contains Ozone rest client library classes.
+ */
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
new file mode 100644
index 0000000..2464fe3
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/RpcClient.java
@@ -0,0 +1,567 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rpc;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ozone.KsmUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.BucketArgs;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.VolumeArgs;
+import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream;
+import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.ksm.protocolPB
+    .KeySpaceManagerProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.ksm.protocolPB
+    .KeySpaceManagerProtocolPB;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServicePort;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.protocolPB.KSMPBHelper;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolPB;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+/**
+ * Ozone RPC Client Implementation. It connects to KSM, SCM and DataNodes
+ * to execute client calls, and uses the RPC protocol to communicate
+ * with the servers.
+ */
+public class RpcClient implements ClientProtocol {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RpcClient.class);
+
+  private final OzoneConfiguration conf;
+  private final StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private final KeySpaceManagerProtocolClientSideTranslatorPB
+      keySpaceManagerClient;
+  private final XceiverClientManager xceiverClientManager;
+  private final int chunkSize;
+  private final UserGroupInformation ugi;
+  private final OzoneAcl.OzoneACLRights userRights;
+  private final OzoneAcl.OzoneACLRights groupRights;
+
+  /**
+   * Creates RpcClient instance with the given configuration.
+   * @param conf Configuration used by the client
+   * @throws IOException if the client cannot be initialized
+   */
+  public RpcClient(Configuration conf) throws IOException {
+    Preconditions.checkNotNull(conf);
+    this.conf = new OzoneConfiguration(conf);
+    this.ugi = UserGroupInformation.getCurrentUser();
+    this.userRights = conf.getEnum(KSMConfigKeys.OZONE_KSM_USER_RIGHTS,
+        KSMConfigKeys.OZONE_KSM_USER_RIGHTS_DEFAULT);
+    this.groupRights = conf.getEnum(KSMConfigKeys.OZONE_KSM_GROUP_RIGHTS,
+        KSMConfigKeys.OZONE_KSM_GROUP_RIGHTS_DEFAULT);
+    long ksmVersion =
+        RPC.getProtocolVersion(KeySpaceManagerProtocolPB.class);
+    InetSocketAddress ksmAddress = KsmUtils
+        .getKsmAddressForClients(conf);
+    RPC.setProtocolEngine(conf, KeySpaceManagerProtocolPB.class,
+        ProtobufRpcEngine.class);
+    this.keySpaceManagerClient =
+        new KeySpaceManagerProtocolClientSideTranslatorPB(
+            RPC.getProxy(KeySpaceManagerProtocolPB.class, ksmVersion,
+                ksmAddress, UserGroupInformation.getCurrentUser(), conf,
+                NetUtils.getDefaultSocketFactory(conf),
+                Client.getRpcTimeout(conf)));
+
+    long scmVersion =
+        RPC.getProtocolVersion(StorageContainerLocationProtocolPB.class);
+    InetSocketAddress scmAddress = getScmAddressForClient();
+    RPC.setProtocolEngine(conf, StorageContainerLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    this.storageContainerLocationClient =
+        new StorageContainerLocationProtocolClientSideTranslatorPB(
+            RPC.getProxy(StorageContainerLocationProtocolPB.class, scmVersion,
+                scmAddress, UserGroupInformation.getCurrentUser(), conf,
+                NetUtils.getDefaultSocketFactory(conf),
+                Client.getRpcTimeout(conf)));
+
+    this.xceiverClientManager = new XceiverClientManager(conf);
+
+    int configuredChunkSize = conf.getInt(
+        ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY,
+        ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_DEFAULT);
+    if(configuredChunkSize > ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE) {
+      LOG.warn("The chunk size ({}) is not allowed to be more than"
+              + " the maximum size ({}),"
+              + " resetting to the maximum size.",
+          configuredChunkSize, ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE);
+      chunkSize = ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE;
+    } else {
+      chunkSize = configuredChunkSize;
+    }
+  }
+
+  private InetSocketAddress getScmAddressForClient() throws IOException {
+    List<ServiceInfo> services = keySpaceManagerClient.getServiceList();
+    ServiceInfo scmInfo = services.stream().filter(
+        a -> a.getNodeType().equals(HddsProtos.NodeType.SCM))
+        .collect(Collectors.toList()).get(0);
+    return NetUtils.createSocketAddr(scmInfo.getHostname()+ ":" +
+        scmInfo.getPort(ServicePort.Type.RPC));
+  }
+
+  @Override
+  public void createVolume(String volumeName) throws IOException {
+    createVolume(volumeName, VolumeArgs.newBuilder().build());
+  }
+
+  @Override
+  public void createVolume(String volumeName, VolumeArgs volArgs)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(volArgs);
+
+    String admin = volArgs.getAdmin() == null ?
+        ugi.getUserName() : volArgs.getAdmin();
+    String owner = volArgs.getOwner() == null ?
+        ugi.getUserName() : volArgs.getOwner();
+    long quota = volArgs.getQuota() == null ?
+        OzoneConsts.MAX_QUOTA_IN_BYTES :
+        OzoneQuota.parseQuota(volArgs.getQuota()).sizeInBytes();
+    List<OzoneAcl> listOfAcls = new ArrayList<>();
+    //User ACL
+    listOfAcls.add(new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+            owner, userRights));
+    //Group ACLs of the User
+    List<String> userGroups = Arrays.asList(UserGroupInformation
+        .createRemoteUser(owner).getGroupNames());
+    userGroups.stream().forEach((group) -> listOfAcls.add(
+        new OzoneAcl(OzoneAcl.OzoneACLType.GROUP, group, groupRights)));
+    //ACLs from VolumeArgs
+    if(volArgs.getAcls() != null) {
+      listOfAcls.addAll(volArgs.getAcls());
+    }
+
+    KsmVolumeArgs.Builder builder = KsmVolumeArgs.newBuilder();
+    builder.setVolume(volumeName);
+    builder.setAdminName(admin);
+    builder.setOwnerName(owner);
+    builder.setQuotaInBytes(quota);
+
+    //Remove duplicates and add ACLs
+    for (OzoneAcl ozoneAcl :
+        listOfAcls.stream().distinct().collect(Collectors.toList())) {
+      builder.addOzoneAcls(KSMPBHelper.convertOzoneAcl(ozoneAcl));
+    }
+
+    LOG.info("Creating Volume: {}, with {} as owner and quota set to {} bytes.",
+        volumeName, owner, quota);
+    keySpaceManagerClient.createVolume(builder.build());
+  }
+
+  @Override
+  public void setVolumeOwner(String volumeName, String owner)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(owner);
+    keySpaceManagerClient.setOwner(volumeName, owner);
+  }
+
+  @Override
+  public void setVolumeQuota(String volumeName, OzoneQuota quota)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(quota);
+    long quotaInBytes = quota.sizeInBytes();
+    keySpaceManagerClient.setQuota(volumeName, quotaInBytes);
+  }
+
+  @Override
+  public OzoneVolume getVolumeDetails(String volumeName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    KsmVolumeArgs volume = keySpaceManagerClient.getVolumeInfo(volumeName);
+    return new OzoneVolume(
+        conf,
+        this,
+        volume.getVolume(),
+        volume.getAdminName(),
+        volume.getOwnerName(),
+        volume.getQuotaInBytes(),
+        volume.getCreationTime(),
+        volume.getAclMap().ozoneAclGetProtobuf().stream().
+            map(KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()));
+  }
+
+  @Override
+  public boolean checkVolumeAccess(String volumeName, OzoneAcl acl)
+      throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public void deleteVolume(String volumeName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    keySpaceManagerClient.deleteVolume(volumeName);
+  }
+
+  @Override
+  public List<OzoneVolume> listVolumes(String volumePrefix, String prevVolume,
+                                       int maxListResult)
+      throws IOException {
+    List<KsmVolumeArgs> volumes = keySpaceManagerClient.listAllVolumes(
+        volumePrefix, prevVolume, maxListResult);
+
+    return volumes.stream().map(volume -> new OzoneVolume(
+        conf,
+        this,
+        volume.getVolume(),
+        volume.getAdminName(),
+        volume.getOwnerName(),
+        volume.getQuotaInBytes(),
+        volume.getCreationTime(),
+        volume.getAclMap().ozoneAclGetProtobuf().stream().
+            map(KSMPBHelper::convertOzoneAcl).collect(Collectors.toList())))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public List<OzoneVolume> listVolumes(String user, String volumePrefix,
+                                       String prevVolume, int maxListResult)
+      throws IOException {
+    List<KsmVolumeArgs> volumes = keySpaceManagerClient.listVolumeByUser(
+        user, volumePrefix, prevVolume, maxListResult);
+
+    return volumes.stream().map(volume -> new OzoneVolume(
+        conf,
+        this,
+        volume.getVolume(),
+        volume.getAdminName(),
+        volume.getOwnerName(),
+        volume.getQuotaInBytes(),
+        volume.getCreationTime(),
+        volume.getAclMap().ozoneAclGetProtobuf().stream().
+            map(KSMPBHelper::convertOzoneAcl).collect(Collectors.toList())))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public void createBucket(String volumeName, String bucketName)
+      throws IOException {
+    createBucket(volumeName, bucketName, BucketArgs.newBuilder().build());
+  }
+
+  @Override
+  public void createBucket(
+      String volumeName, String bucketName, BucketArgs bucketArgs)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(bucketArgs);
+
+    Boolean isVersionEnabled = bucketArgs.getVersioning() == null ?
+        Boolean.FALSE : bucketArgs.getVersioning();
+    StorageType storageType = bucketArgs.getStorageType() == null ?
+        StorageType.DEFAULT : bucketArgs.getStorageType();
+    List<OzoneAcl> listOfAcls = new ArrayList<>();
+    //User ACL
+    listOfAcls.add(new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+        ugi.getUserName(), userRights));
+    //Group ACLs of the User
+    List<String> userGroups = Arrays.asList(UserGroupInformation
+        .createRemoteUser(ugi.getUserName()).getGroupNames());
+    userGroups.stream().forEach((group) -> listOfAcls.add(
+        new OzoneAcl(OzoneAcl.OzoneACLType.GROUP, group, groupRights)));
+    //ACLs from BucketArgs
+    if(bucketArgs.getAcls() != null) {
+      listOfAcls.addAll(bucketArgs.getAcls());
+    }
+
+    KsmBucketInfo.Builder builder = KsmBucketInfo.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setIsVersionEnabled(isVersionEnabled)
+        .setStorageType(storageType)
+        .setAcls(listOfAcls.stream().distinct().collect(Collectors.toList()));
+
+    LOG.info("Creating Bucket: {}/{}, with Versioning {} and " +
+            "Storage Type set to {}", volumeName, bucketName, isVersionEnabled,
+            storageType);
+    keySpaceManagerClient.createBucket(builder.build());
+  }
+
+  @Override
+  public void addBucketAcls(
+      String volumeName, String bucketName, List<OzoneAcl> addAcls)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(addAcls);
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setAddAcls(addAcls);
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void removeBucketAcls(
+      String volumeName, String bucketName, List<OzoneAcl> removeAcls)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(removeAcls);
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setRemoveAcls(removeAcls);
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void setBucketVersioning(
+      String volumeName, String bucketName, Boolean versioning)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(versioning);
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setIsVersionEnabled(versioning);
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void setBucketStorageType(
+      String volumeName, String bucketName, StorageType storageType)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(storageType);
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setStorageType(storageType);
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void deleteBucket(
+      String volumeName, String bucketName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    keySpaceManagerClient.deleteBucket(volumeName, bucketName);
+  }
+
+  @Override
+  public void checkBucketAccess(
+      String volumeName, String bucketName) throws IOException {
+    throw new UnsupportedOperationException("Not yet implemented.");
+  }
+
+  @Override
+  public OzoneBucket getBucketDetails(
+      String volumeName, String bucketName) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    KsmBucketInfo bucketArgs =
+        keySpaceManagerClient.getBucketInfo(volumeName, bucketName);
+    return new OzoneBucket(
+        conf,
+        this,
+        bucketArgs.getVolumeName(),
+        bucketArgs.getBucketName(),
+        bucketArgs.getAcls(),
+        bucketArgs.getStorageType(),
+        bucketArgs.getIsVersionEnabled(),
+        bucketArgs.getCreationTime());
+  }
+
+  @Override
+  public List<OzoneBucket> listBuckets(String volumeName, String bucketPrefix,
+                                       String prevBucket, int maxListResult)
+      throws IOException {
+    List<KsmBucketInfo> buckets = keySpaceManagerClient.listBuckets(
+        volumeName, prevBucket, bucketPrefix, maxListResult);
+
+    return buckets.stream().map(bucket -> new OzoneBucket(
+        conf,
+        this,
+        bucket.getVolumeName(),
+        bucket.getBucketName(),
+        bucket.getAcls(),
+        bucket.getStorageType(),
+        bucket.getIsVersionEnabled(),
+        bucket.getCreationTime()))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public OzoneOutputStream createKey(
+      String volumeName, String bucketName, String keyName, long size,
+      ReplicationType type, ReplicationFactor factor)
+      throws IOException {
+    String requestId = UUID.randomUUID().toString();
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .setDataSize(size)
+        .setType(HddsProtos.ReplicationType.valueOf(type.toString()))
+        .setFactor(HddsProtos.ReplicationFactor.valueOf(factor.getValue()))
+        .build();
+
+    OpenKeySession openKey = keySpaceManagerClient.openKey(keyArgs);
+    ChunkGroupOutputStream groupOutputStream =
+        new ChunkGroupOutputStream.Builder()
+            .setHandler(openKey)
+            .setXceiverClientManager(xceiverClientManager)
+            .setScmClient(storageContainerLocationClient)
+            .setKsmClient(keySpaceManagerClient)
+            .setChunkSize(chunkSize)
+            .setRequestID(requestId)
+            .setType(HddsProtos.ReplicationType.valueOf(type.toString()))
+            .setFactor(HddsProtos.ReplicationFactor.valueOf(factor.getValue()))
+            .build();
+    groupOutputStream.addPreallocateBlocks(
+        openKey.getKeyInfo().getLatestVersionLocations(),
+        openKey.getOpenVersion());
+    return new OzoneOutputStream(groupOutputStream);
+  }
+
+  @Override
+  public OzoneInputStream getKey(
+      String volumeName, String bucketName, String keyName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(keyName);
+    String requestId = UUID.randomUUID().toString();
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .build();
+    KsmKeyInfo keyInfo = keySpaceManagerClient.lookupKey(keyArgs);
+    LengthInputStream lengthInputStream =
+        ChunkGroupInputStream.getFromKsmKeyInfo(
+            keyInfo, xceiverClientManager, storageContainerLocationClient,
+            requestId);
+    return new OzoneInputStream(
+        (ChunkGroupInputStream)lengthInputStream.getWrappedStream());
+  }
+
+  @Override
+  public void deleteKey(
+      String volumeName, String bucketName, String keyName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(keyName);
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .build();
+    keySpaceManagerClient.deleteKey(keyArgs);
+  }
+
+  @Override
+  public List<OzoneKey> listKeys(String volumeName, String bucketName,
+                                 String keyPrefix, String prevKey,
+                                 int maxListResult)
+      throws IOException {
+    List<KsmKeyInfo> keys = keySpaceManagerClient.listKeys(
+        volumeName, bucketName, prevKey, keyPrefix, maxListResult);
+
+    return keys.stream().map(key -> new OzoneKey(
+        key.getVolumeName(),
+        key.getBucketName(),
+        key.getKeyName(),
+        key.getDataSize(),
+        key.getCreationTime(),
+        key.getModificationTime()))
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public OzoneKey getKeyDetails(
+      String volumeName, String bucketName, String keyName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    Preconditions.checkNotNull(keyName);
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .build();
+    KsmKeyInfo keyInfo = keySpaceManagerClient.lookupKey(keyArgs);
+    return new OzoneKey(keyInfo.getVolumeName(),
+                        keyInfo.getBucketName(),
+                        keyInfo.getKeyName(),
+                        keyInfo.getDataSize(),
+                        keyInfo.getCreationTime(),
+                        keyInfo.getModificationTime());
+  }
+
+  @Override
+  public void close() throws IOException {
+    IOUtils.cleanupWithLogger(LOG, storageContainerLocationClient);
+    IOUtils.cleanupWithLogger(LOG, keySpaceManagerClient);
+    IOUtils.cleanupWithLogger(LOG, xceiverClientManager);
+  }
+}
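A hedged usage sketch of the ClientProtocol methods implemented above: it assumes a reachable cluster configured through the default OzoneConfiguration and that the STAND_ALONE and ONE constants exist on the hdds ReplicationType/ReplicationFactor enums. The class and the volume/bucket/key names are illustrative only and not part of this patch.

import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.client.rpc.RpcClient;

import java.nio.charset.StandardCharsets;

public class RpcClientUsageSketch {
  public static void main(String[] args) throws Exception {
    // RpcClient implements ClientProtocol; the default OzoneConfiguration
    // must point at a running KSM/SCM for these calls to succeed.
    RpcClient client = new RpcClient(new OzoneConfiguration());
    try {
      client.createVolume("vol1");
      client.createBucket("vol1", "bucket1");
      byte[] data = "hello ozone".getBytes(StandardCharsets.UTF_8);
      // The size is supplied up front; the returned stream writes through
      // ChunkGroupOutputStream to the DataNodes.
      try (OzoneOutputStream out = client.createKey(
          "vol1", "bucket1", "key1", data.length,
          ReplicationType.STAND_ALONE, ReplicationFactor.ONE)) {
        out.write(data);
      }
    } finally {
      client.close();
    }
  }
}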
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/package-info.java
new file mode 100644
index 0000000..0fcc3fc
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/rpc/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rpc;
+
+/**
+ * This package contains Ozone rpc client library classes.
+ */
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneBucket.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneBucket.java
new file mode 100644
index 0000000..e6fe0ec
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneBucket.java
@@ -0,0 +1,645 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.io.IOUtils;
+
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+
+import static org.apache.hadoop.hdds.server.ServerUtils.releaseConnection;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.FileEntity;
+import org.apache.http.entity.InputStreamEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Strings;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.util.LinkedList;
+import java.util.List;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.ENCODING;
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.ENCODING_NAME;
+
+/**
+ * A Bucket class that represents an Ozone Bucket.
+ */
+public class OzoneBucket {
+  static final Logger LOG = LoggerFactory.getLogger(OzoneBucket.class);
+
+  private BucketInfo bucketInfo;
+  private OzoneVolume volume;
+
+  /**
+   * Constructor for bucket.
+   *
+   * @param info   - BucketInfo
+   * @param volume - OzoneVolume Object that contains this bucket
+   */
+  public OzoneBucket(BucketInfo info, OzoneVolume volume) {
+    this.bucketInfo = info;
+    this.volume = volume;
+  }
+
+  /**
+   * Gets bucket Info.
+   *
+   * @return BucketInfo
+   */
+  public BucketInfo getBucketInfo() {
+    return bucketInfo;
+  }
+
+  /**
+   * Sets Bucket Info.
+   *
+   * @param bucketInfo BucketInfo
+   */
+  public void setBucketInfo(BucketInfo bucketInfo) {
+    this.bucketInfo = bucketInfo;
+  }
+
+  /**
+   * Returns the parent volume class.
+   *
+   * @return - OzoneVolume
+   */
+  OzoneVolume getVolume() {
+    return volume;
+  }
+
+  /**
+   * Returns bucket name.
+   *
+   * @return Bucket Name
+   */
+  public String getBucketName() {
+    return bucketInfo.getBucketName();
+  }
+
+  /**
+   * Returns the Acls on the bucket.
+   *
+   * @return - Acls
+   */
+  public List<OzoneAcl> getAcls() {
+    return bucketInfo.getAcls();
+  }
+
+  /**
+   * Return versioning info on the bucket - Enabled or disabled.
+   *
+   * @return - Version Enum
+   */
+  public OzoneConsts.Versioning getVersioning() {
+    return bucketInfo.getVersioning();
+  }
+
+  /**
+   * Gets the storage type of the bucket.
+   *
+   * @return StorageType Enum
+   */
+  public StorageType getStorageType() {
+    return bucketInfo.getStorageType();
+  }
+
+  /**
+   * Gets the creation time of the bucket.
+   *
+   * @return String
+   */
+  public String getCreatedOn() {
+    return bucketInfo.getCreatedOn();
+  }
+
+  /**
+   * Puts an Object in Ozone bucket.
+   *
+   * @param keyName - Name of the key
+   * @param data    - Data that you want to put
+   * @throws OzoneException
+   */
+  public void putKey(String keyName, String data) throws OzoneException {
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException("Invalid key Name.");
+    }
+
+    if (data == null) {
+      throw new OzoneRestClientException("Invalid data.");
+    }
+
+    HttpPut putRequest = null;
+    InputStream is = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+          + "/" + keyName).build();
+
+      putRequest = getVolume().getClient().getHttpPut(builder.toString());
+
+      byte[] dataBytes = data.getBytes(ENCODING);
+      is = new ByteArrayInputStream(dataBytes);
+      // Use the encoded byte length, not the character count, for the entity.
+      putRequest.setEntity(new InputStreamEntity(is, dataBytes.length));
+      is.mark(dataBytes.length);
+      try {
+        putRequest.setHeader(Header.CONTENT_MD5, DigestUtils.md5Hex(is));
+      } finally {
+        is.reset();
+      }
+      executePutKey(putRequest, httpClient);
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      IOUtils.closeStream(is);
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Puts an Object in Ozone Bucket.
+   *
+   * @param dataFile - File from which you want the data to be put. Key Name
+   *                 will be the same as the file name, devoid of any path.
+   * @throws OzoneException
+   */
+  public void putKey(File dataFile) throws OzoneException {
+    if (dataFile == null) {
+      throw new OzoneRestClientException("Invalid file object.");
+    }
+    String keyName = dataFile.getName();
+    putKey(keyName, dataFile);
+  }
+
+  /**
+   * Puts a Key in Ozone Bucket.
+   *
+   * @param keyName - Name of the Key
+   * @param file    - File that gets read and put into Ozone.
+   * @throws OzoneException
+   */
+  public void putKey(String keyName, File file)
+      throws OzoneException {
+
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    if (file == null) {
+      throw new OzoneRestClientException("Invalid data stream");
+    }
+
+    HttpPut putRequest = null;
+    FileInputStream fis = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+          + "/" + keyName).build();
+
+      putRequest = getVolume().getClient().getHttpPut(builder.toString());
+
+      FileEntity fileEntity = new FileEntity(file, ContentType
+          .APPLICATION_OCTET_STREAM);
+      putRequest.setEntity(fileEntity);
+
+      fis = new FileInputStream(file);
+      putRequest.setHeader(Header.CONTENT_MD5, DigestUtils.md5Hex(fis));
+      executePutKey(putRequest, httpClient);
+
+    } catch (IOException | URISyntaxException ex) {
+      final OzoneRestClientException orce = new OzoneRestClientException(
+          "Failed to putKey: keyName=" + keyName + ", file=" + file);
+      orce.initCause(ex);
+      LOG.trace("", orce);
+      throw orce;
+    } finally {
+      IOUtils.closeStream(fis);
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * executePutKey executes the Put request against the Ozone Server.
+   *
+   * @param putRequest - Http Put Request
+   * @param httpClient - httpClient
+   * @throws OzoneException
+   * @throws IOException
+   */
+  public static void executePutKey(HttpPut putRequest,
+      CloseableHttpClient httpClient) throws OzoneException, IOException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(putRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if ((errorCode == HTTP_OK) || (errorCode == HTTP_CREATED)) {
+        return;
+      }
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      throw OzoneException.parse(EntityUtils.toString(entity));
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Gets a key from the Ozone server and writes it to the file pointed to by
+   * the downloadTo path.
+   *
+   * @param keyName    - Key Name in Ozone.
+   * @param downloadTo File Name to download the Key's Data to
+   */
+  public void getKey(String keyName, Path downloadTo) throws OzoneException {
+
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    if (downloadTo == null) {
+      throw new OzoneRestClientException("Invalid download path");
+    }
+
+    FileOutputStream outPutFile = null;
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      outPutFile = new FileOutputStream(downloadTo.toFile());
+
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+          + "/" + keyName).build();
+
+      getRequest = getVolume().getClient().getHttpGet(builder.toString());
+      executeGetKey(getRequest, httpClient, outPutFile);
+      outPutFile.flush();
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      IOUtils.closeStream(outPutFile);
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Returns the data part of the key as a string.
+   *
+   * @param keyName - KeyName to get
+   * @return String - Data
+   * @throws OzoneException
+   */
+  public String getKey(String keyName) throws OzoneException {
+
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    HttpGet getRequest = null;
+    ByteArrayOutputStream outPutStream = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      outPutStream = new ByteArrayOutputStream();
+
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+          + "/" + keyName).build();
+
+      getRequest = getVolume().getClient().getHttpGet(builder.toString());
+      executeGetKey(getRequest, httpClient, outPutStream);
+      return outPutStream.toString(ENCODING_NAME);
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      IOUtils.closeStream(outPutStream);
+      releaseConnection(getRequest);
+    }
+
+  }
+
+  /**
+   * Executes get key and returns the data.
+   *
+   * @param getRequest - http Get Request
+   * @param httpClient - Client
+   * @param stream     - Stream to write data to.
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public static void executeGetKey(HttpGet getRequest,
+      CloseableHttpClient httpClient, OutputStream stream)
+      throws IOException, OzoneException {
+
+    HttpEntity entity = null;
+    try {
+
+      HttpResponse response = httpClient.execute(getRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (errorCode == HTTP_OK) {
+        entity.writeTo(stream);
+        return;
+      }
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      throw OzoneException.parse(EntityUtils.toString(entity));
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Deletes a key in this bucket.
+   *
+   * @param keyName - Name of the Key
+   * @throws OzoneException
+   */
+  public void deleteKey(String keyName) throws OzoneException {
+
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    HttpDelete deleteRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+          + "/" + keyName).build();
+
+      deleteRequest = getVolume()
+          .getClient().getHttpDelete(builder.toString());
+      executeDeleteKey(deleteRequest, httpClient);
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(deleteRequest);
+    }
+  }
+
+  /**
+   * Executes deleteKey.
+   *
+   * @param deleteRequest - http Delete Request
+   * @param httpClient    - Client
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executeDeleteKey(HttpDelete deleteRequest,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+
+    HttpEntity entity = null;
+    try {
+
+      HttpResponse response = httpClient.execute(deleteRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (errorCode == HTTP_OK) {
+        return;
+      }
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      throw OzoneException.parse(EntityUtils.toString(entity));
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * List all keys in a bucket.
+   *
+   * @param resultLength The maximum number of keys to return.
+   * @param previousKey The key from where listing should start;
+   *                    this key is excluded from the result.
+   * @param prefix The prefix that returned keys must start with.
+   * @return List of OzoneKeys
+   * @throws OzoneException
+   */
+  public List<OzoneKey> listKeys(String resultLength, String previousKey,
+      String prefix) throws OzoneException {
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      OzoneRestClient client = getVolume().getClient();
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder.setPath("/" + getVolume().getVolumeName() + "/" + getBucketName())
+          .build();
+
+      if (!Strings.isNullOrEmpty(resultLength)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_MAXKEYS, resultLength);
+      }
+
+      if (!Strings.isNullOrEmpty(previousKey)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREVKEY, previousKey);
+      }
+
+      if (!Strings.isNullOrEmpty(prefix)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix);
+      }
+
+      final String uri = builder.toString();
+      getRequest = client.getHttpGet(uri);
+      LOG.trace("listKeys URI={}", uri);
+      return executeListKeys(getRequest, httpClient);
+
+    } catch (IOException | URISyntaxException e) {
+      throw new OzoneRestClientException(e.getMessage(), e);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Lists keys in the bucket with the provided prefix, returning results
+   * in pages.
+   *
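+   * <p>Illustrative paging loop (a sketch only; the prefix and page size are
+   * placeholders, and it assumes KeyInfo exposes {@code getKeyName()}):
+   * <pre>{@code
+   *   String prev = "";
+   *   List<OzoneKey> page = bucket.listKeys("key-", 100, prev);
+   *   while (!page.isEmpty()) {
+   *     prev = page.get(page.size() - 1).getObjectInfo().getKeyName();
+   *     page = bucket.listKeys("key-", 100, prev);
+   *   }
+   * }</pre>
+   *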
+   * @param prefix The prefix of the object keys
+   * @param maxResult Maximum number of keys per response
+   * @param prevKey The previous key, used for paging
+   * @return List of OzoneKeys
+   * @throws OzoneException
+   */
+  public List<OzoneKey> listKeys(String prefix, int maxResult, String prevKey)
+      throws OzoneException {
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient =
+        HttpClientBuilder.create().build()) {
+      final URI uri = new URIBuilder(volume.getClient().getEndPointURI())
+          .setPath(OzoneConsts.KSM_KEY_PREFIX + getVolume().getVolumeName() +
+              OzoneConsts.KSM_KEY_PREFIX + getBucketName())
+          .setParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix)
+          .setParameter(Header.OZONE_LIST_QUERY_MAXKEYS,
+              String.valueOf(maxResult))
+          .setParameter(Header.OZONE_LIST_QUERY_PREVKEY, prevKey)
+          .build();
+      final OzoneRestClient client = getVolume().getClient();
+      getRequest = client.getHttpGet(uri.toString());
+      return executeListKeys(getRequest, httpClient);
+    } catch (IOException | URISyntaxException e) {
+      throw new OzoneRestClientException(e.getMessage(), e);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Execute list Key.
+   *
+   * @param getRequest - HttpGet
+   * @param httpClient - HttpClient
+   * @return List<OzoneKey>
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public static List<OzoneKey> executeListKeys(HttpGet getRequest,
+      CloseableHttpClient httpClient) throws IOException, OzoneException {
+    HttpEntity entity = null;
+    List<OzoneKey> ozoneKeyList = new LinkedList<OzoneKey>();
+    try {
+      HttpResponse response = httpClient.execute(getRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+
+      entity = response.getEntity();
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+      if (errorCode == HTTP_OK) {
+        String temp = EntityUtils.toString(entity);
+        ListKeys keyList = ListKeys.parse(temp);
+
+        for (KeyInfo info : keyList.getKeyList()) {
+          ozoneKeyList.add(new OzoneKey(info));
+        }
+        return ozoneKeyList;
+
+      } else {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Get info of the specified key.
+   */
+  public OzoneKey getKeyInfo(String keyName) throws OzoneException {
+    if ((keyName == null) || keyName.isEmpty()) {
+      throw new OzoneRestClientException(
+          "Unable to get key info, key name is null or empty");
+    }
+
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      OzoneRestClient client = getVolume().getClient();
+      URIBuilder builder = new URIBuilder(volume.getClient().getEndPointURI());
+      builder
+          .setPath("/" + getVolume().getVolumeName() + "/" + getBucketName()
+              + "/" + keyName)
+          .setParameter(Header.OZONE_INFO_QUERY_TAG,
+              Header.OZONE_INFO_QUERY_KEY)
+          .build();
+
+      getRequest = client.getHttpGet(builder.toString());
+      return executeGetKeyInfo(getRequest, httpClient);
+    } catch (IOException | URISyntaxException e) {
+      throw new OzoneRestClientException(e.getMessage(), e);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Execute get Key info.
+   *
+   * @param getRequest - HttpGet
+   * @param httpClient - HttpClient
+   * @return OzoneKey
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private OzoneKey executeGetKeyInfo(HttpGet getRequest,
+      CloseableHttpClient httpClient) throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(getRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      if (errorCode == HTTP_OK) {
+        OzoneKey key = new OzoneKey(
+            KeyInfo.parse(EntityUtils.toString(entity)));
+        return key;
+      }
+      throw OzoneException.parse(EntityUtils.toString(entity));
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneKey.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneKey.java
new file mode 100644
index 0000000..5a3a0c4
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneKey.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+
+/**
+ * Client side representation of an ozone Key.
+ */
+public class OzoneKey {
+  private KeyInfo keyInfo;
+
+  /**
+   * Constructor for Ozone Key.
+   * @param keyInfo - Key Info
+   */
+  public OzoneKey(KeyInfo keyInfo) {
+    this.keyInfo = keyInfo;
+  }
+
+  /**
+   * Returns Key Info.
+   * @return Object Info
+   */
+  public KeyInfo getObjectInfo() {
+    return keyInfo;
+  }
+
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClient.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClient.java
new file mode 100644
index 0000000..6d0bbf4
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClient.java
@@ -0,0 +1,803 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.util.Time;
+
+import static org.apache.hadoop.hdds.server.ServerUtils.releaseConnection;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.FileEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.util.EntityUtils;
+
+import javax.ws.rs.core.HttpHeaders;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+
+/**
+ * Ozone client that connects to an Ozone server. Please note that this
+ * class is not thread safe.
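+ *
+ * <p>Illustrative usage (a minimal sketch; the endpoint address, user and
+ * resource names below are placeholders, not values defined by this client):
+ * <pre>{@code
+ *   OzoneRestClient client =
+ *       new OzoneRestClient("http://localhost:9864", "hdfs");
+ *   OzoneVolume volume = client.createVolume("vol-0", "bilbo", "100TB");
+ *   OzoneBucket bucket = volume.createBucket("bucket-0");
+ *   client.putKey("vol-0", "bucket-0", "key-0", new File("/tmp/key-0.txt"));
+ *   client.close();
+ * }</pre>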
+ */
+public class OzoneRestClient implements Closeable {
+  private URI endPointURI;
+  private String userAuth;
+
+  /**
+   * Constructor for OzoneRestClient.
+   */
+  public OzoneRestClient() {
+  }
+
+  /**
+   * Constructor for OzoneRestClient.
+   */
+  public OzoneRestClient(String ozoneURI)
+      throws OzoneException, URISyntaxException {
+    setEndPoint(ozoneURI);
+  }
+
+  /**
+   * Constructor for OzoneRestClient.
+   */
+  public OzoneRestClient(String ozoneURI, String userAuth)
+      throws OzoneException, URISyntaxException {
+    setEndPoint(ozoneURI);
+    setUserAuth(userAuth);
+  }
+
+  /**
+   * Returns the end point URI.
+   *
+   * @return URI - the end point URI
+   */
+  public URI getEndPointURI() {
+    return endPointURI;
+  }
+
+  /**
+   * Sets the end point info using a URI.
+   *
+   * @param endPointURI - URI
+   * @throws OzoneException
+   */
+  public void setEndPointURI(URI endPointURI) throws OzoneException {
+    if ((endPointURI == null) || (endPointURI.toString().isEmpty())) {
+      throw new OzoneRestClientException("Invalid ozone URI");
+    }
+    this.endPointURI = endPointURI;
+  }
+
+  /**
+   * Set endPoint.
+   *
+   * @param clusterFQDN - cluster FQDN.
+   */
+  public void setEndPoint(String clusterFQDN) throws
+      OzoneException, URISyntaxException {
+    setEndPointURI(new URI(clusterFQDN));
+  }
+
+  /**
+   * Get user Auth String.
+   *
+   * @return - User Auth String
+   */
+  public String getUserAuth() {
+    return this.userAuth;
+  }
+
+  /**
+   * Set User Auth.
+   *
+   * @param userAuth - User Auth String
+   */
+  public void setUserAuth(String userAuth) {
+    this.userAuth = userAuth;
+  }
+
+  /**
+   * Creates a volume.
+   *
+   * @param volumeName - volume name, 3 - 63 chars, lower-case letters.
+   * @param onBehalfOf - The user on whose behalf the call is made
+   * @param quota      - Quotas are specified as an integer followed by a
+   *                   unit (MB, GB or TB), for example 100TB.
+   * @return OzoneVolume - the newly created volume
+   * @throws OzoneException
+   */
+  public OzoneVolume createVolume(String volumeName, String onBehalfOf,
+                                  String quota) throws OzoneException {
+    HttpPost httpPost = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(volumeName);
+
+      URIBuilder builder = new URIBuilder(endPointURI);
+      builder.setPath("/" + volumeName);
+      if (quota != null) {
+        builder.setParameter(Header.OZONE_QUOTA_QUERY_TAG, quota);
+      }
+
+      httpPost = getHttpPost(onBehalfOf, builder.build().toString());
+      executeCreateVolume(httpPost, httpClient);
+      return getVolume(volumeName);
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpPost);
+    }
+  }
+
+  /**
+   * Returns information about an existing volume. If the volume does not
+   * exist, or if the user does not have access rights, an OzoneException is
+   * thrown.
+   *
+   * @param volumeName - volume name 3 - 63 chars, small letters.
+   * @return OzoneVolume Ozone Client Volume Class.
+   * @throws OzoneException
+   */
+  public OzoneVolume getVolume(String volumeName) throws OzoneException {
+    HttpGet httpGet = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(volumeName);
+      URIBuilder builder = new URIBuilder(endPointURI);
+      builder.setPath("/" + volumeName)
+          .setParameter(Header.OZONE_INFO_QUERY_TAG,
+              Header.OZONE_INFO_QUERY_VOLUME)
+          .build();
+
+      httpGet = getHttpGet(builder.toString());
+      return executeInfoVolume(httpGet, httpClient);
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpGet);
+    }
+  }
+
+  /**
+   * Lists all the volumes owned by the caller, or by the user named in the
+   * onBehalfOf string.
+   *
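+   * <p>For example (a sketch; the user name and prefix are placeholders):
+   * <pre>{@code
+   *   // List up to 100 of bilbo's volumes whose names start with "vol".
+   *   List<OzoneVolume> volumes =
+   *       client.listVolumes("bilbo", "vol", 100, "");
+   * }</pre>
+   *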
+   * @param onBehalfOf
+   *   User name to list volumes for when it is not the caller; for example,
+   *   an admin listing another user's volumes.
+   * @param prefix
+   *   Return only volumes that match this prefix.
+   * @param maxKeys
+   *   Maximum number of results to return; if the result set is smaller
+   *   than the requested size, the listing is complete.
+   * @param previousVolume
+   *   The previous volume name.
+   * @return List of Volumes
+   * @throws OzoneException
+   */
+  public List<OzoneVolume> listVolumes(String onBehalfOf, String prefix,
+      int maxKeys, String previousVolume) throws OzoneException {
+    HttpGet httpGet = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      URIBuilder builder = new URIBuilder(endPointURI);
+      if (!Strings.isNullOrEmpty(prefix)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix);
+      }
+
+      if (maxKeys > 0) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_MAXKEYS, Integer
+            .toString(maxKeys));
+      }
+
+      if (!Strings.isNullOrEmpty(previousVolume)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREVKEY,
+            previousVolume);
+      }
+
+      builder.setPath("/").build();
+
+      httpGet = getHttpGet(builder.toString());
+      if (onBehalfOf != null) {
+        httpGet.addHeader(Header.OZONE_USER, onBehalfOf);
+      }
+      return executeListVolume(httpGet, httpClient);
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpGet);
+    }
+  }
+
+  /**
+   * Lists all the volumes owned by the caller, or by the user named in the
+   * onBehalfOf string.
+   *
+   * @param onBehalfOf - User name to list volumes for when it is not the
+   *                   caller; for example, an admin listing another user's
+   *                   volumes.
+   * @param prefix     - Return only volumes that match this prefix.
+   * @param maxKeys    - Maximum number of results to return; if the result
+   *                   set is smaller than the requested size, the listing
+   *                   is complete.
+   * @param prevKey    - The last volume the client received; the server
+   *                   continues returning results from that point.
+   * @return List of Volumes
+   * @throws OzoneException
+   */
+  public List<OzoneVolume> listVolumes(String onBehalfOf, String prefix,
+      int maxKeys, OzoneVolume prevKey) throws OzoneException {
+    String volumeName = null;
+
+    if (prevKey != null) {
+      volumeName = prevKey.getVolumeName();
+    }
+
+    return listVolumes(onBehalfOf, prefix, maxKeys, volumeName);
+  }
+
+  /**
+   * Lists volumes of the current user or, if onBehalfOf is not null, volumes
+   * owned by that user. Admin privilege is required to read other users'
+   * volume lists.
+   *
+   * @param onBehalfOf - Name of the user whose volume list is requested
+   * @return - Volume list.
+   * @throws OzoneException
+   */
+  public List<OzoneVolume> listVolumes(String onBehalfOf)
+      throws OzoneException {
+    return listVolumes(onBehalfOf, null,
+        Integer.parseInt(Header.OZONE_DEFAULT_LIST_SIZE), StringUtils.EMPTY);
+  }
+
+  /**
+   * List all volumes in a cluster. This can be invoked only by an Admin.
+   *
+   * @param prefix  - Returns only volumes that match this prefix.
+   * @param maxKeys - Maximum number of keys to return
+   * @param prevKey - Last Ozone Volume from the last Iteration.
+   * @return List of Volumes
+   * @throws OzoneException
+   */
+  public List<OzoneVolume> listAllVolumes(String prefix, int maxKeys,
+      OzoneVolume prevKey) throws OzoneException {
+    HttpGet httpGet = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      URIBuilder builder = new URIBuilder(endPointURI);
+      if (prefix != null) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix);
+      }
+
+      if (maxKeys > 0) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_MAXKEYS, Integer
+            .toString(maxKeys));
+      }
+
+      if (prevKey != null) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREVKEY,
+            prevKey.getOwnerName() + "/" + prevKey.getVolumeName());
+      }
+
+      builder.addParameter(Header.OZONE_LIST_QUERY_ROOTSCAN, "true");
+      builder.setPath("/").build();
+      httpGet = getHttpGet(builder.toString());
+      return executeListVolume(httpGet, httpClient);
+
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpGet);
+    }
+  }
+
+  /**
+   * Deletes a given volume.
+   *
+   * @param volumeName - volume to be deleted.
+   * @throws OzoneException - Ozone Exception
+   */
+  public void deleteVolume(String volumeName) throws OzoneException {
+    HttpDelete httpDelete = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(volumeName);
+      URIBuilder builder = new URIBuilder(endPointURI);
+      builder.setPath("/" + volumeName).build();
+
+      httpDelete = getHttpDelete(builder.toString());
+      executeDeleteVolume(httpDelete, httpClient);
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpDelete);
+    }
+  }
+
+  /**
+   * Sets the Volume Owner.
+   *
+   * @param volumeName - Volume Name
+   * @param newOwner   - New Owner Name
+   * @throws OzoneException
+   */
+  public void setVolumeOwner(String volumeName, String newOwner)
+      throws OzoneException {
+    HttpPut putRequest = null;
+    if (newOwner == null || newOwner.isEmpty()) {
+      throw new OzoneRestClientException("Invalid new owner name");
+    }
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(volumeName);
+      URIBuilder builder = new URIBuilder(endPointURI);
+      builder.setPath("/" + volumeName).build();
+
+      putRequest = getHttpPut(builder.toString());
+      putRequest.addHeader(Header.OZONE_USER, newOwner);
+      executePutVolume(putRequest, httpClient);
+
+    } catch (URISyntaxException | IllegalArgumentException | IOException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Sets the volume quota. Quotas are specified as an integer followed by a
+   * unit (MB, GB or TB), for example 100TB.
+   * <p>
+   * To remove a quota you can specify Header.OZONE_QUOTA_REMOVE.
+   *
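+   * <p>For example (a sketch; the volume name is a placeholder):
+   * <pre>{@code
+   *   client.setVolumeQuota("vol-0", "10GB");
+   *   client.setVolumeQuota("vol-0", Header.OZONE_QUOTA_REMOVE);
+   * }</pre>
+   *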
+   * @param volumeName - volume name
+   * @param quota      - Quota String or Header.OZONE_QUOTA_REMOVE
+   * @throws OzoneException
+   */
+  public void setVolumeQuota(String volumeName, String quota)
+      throws OzoneException {
+    if (quota == null || quota.isEmpty()) {
+      throw new OzoneRestClientException("Invalid quota");
+    }
+    HttpPut putRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(volumeName);
+      URIBuilder builder = new URIBuilder(endPointURI);
+      builder.setPath("/" + volumeName)
+          .setParameter(Header.OZONE_QUOTA_QUERY_TAG, quota)
+          .build();
+
+      putRequest = getHttpPut(builder.toString());
+      executePutVolume(putRequest, httpClient);
+
+    } catch (URISyntaxException | IllegalArgumentException | IOException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Sends the create Volume request to the server.
+   *
+   * @param httppost   - http post class
+   * @param httpClient - httpClient
+   * @throws IOException    -
+   * @throws OzoneException
+   */
+  private void executeCreateVolume(HttpPost httppost,
+      final CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(httppost);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if ((errorCode == HTTP_OK) || (errorCode == HTTP_CREATED)) {
+        return;
+      }
+
+      if (entity != null) {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      } else {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consume(entity);
+      }
+    }
+  }
+
+  /**
+   * Sends the info volume request to the server and returns the volume.
+   *
+   * @param httpGet - httpGet
+   * @return OzoneVolume
+   * @throws IOException    -
+   * @throws OzoneException
+   */
+  private OzoneVolume executeInfoVolume(HttpGet httpGet,
+      final CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(httpGet);
+      int errorCode = response.getStatusLine().getStatusCode();
+
+      entity = response.getEntity();
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      if (errorCode == HTTP_OK) {
+        OzoneVolume volume = new OzoneVolume(this);
+        volume.setVolumeInfo(EntityUtils.toString(entity));
+        return volume;
+      } else {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Sends update volume requests to the server.
+   *
+   * @param putRequest http request
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executePutVolume(HttpPut putRequest,
+      final CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(putRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+      if (errorCode != HTTP_OK) {
+        if (entity == null) {
+          throw new OzoneRestClientException(
+              "Unexpected null in http payload");
+        }
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consume(entity);
+      }
+    }
+  }
+
+  /**
+   * List Volumes.
+   *
+   * @param httpGet - httpGet
+   * @return List of OzoneVolume
+   * @throws IOException    -
+   * @throws OzoneException
+   */
+  private List<OzoneVolume> executeListVolume(HttpGet httpGet,
+      final CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    List<OzoneVolume> volList = new LinkedList<>();
+    try {
+      HttpResponse response = httpClient.execute(httpGet);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+
+      String temp = EntityUtils.toString(entity);
+      if (errorCode == HTTP_OK) {
+        ListVolumes listVolumes =
+            ListVolumes.parse(temp);
+
+        for (VolumeInfo info : listVolumes.getVolumes()) {
+          volList.add(new OzoneVolume(info, this));
+        }
+        return volList;
+
+      } else {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Delete Volume.
+   *
+   * @param httpDelete - Http Delete Request
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executeDeleteVolume(HttpDelete httpDelete,
+      final CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(httpDelete);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (errorCode != HTTP_OK) {
+        if (entity == null) {
+          throw new OzoneRestClientException(
+              "Unexpected null in http payload");
+        }
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Puts a Key in Ozone Bucket.
+   *
+   * @param volumeName - Name of the Volume
+   * @param bucketName - Name of the Bucket
+   * @param keyName - Name of the Key
+   * @param file    - File whose contents are uploaded to Ozone.
+   * @throws OzoneException
+   */
+  public void putKey(String volumeName, String bucketName, String keyName,
+      File file) throws OzoneException {
+    OzoneUtils.verifyResourceName(volumeName);
+    OzoneUtils.verifyResourceName(bucketName);
+
+    if (StringUtils.isEmpty(keyName)) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    if (file == null) {
+      throw new OzoneRestClientException("Invalid data stream");
+    }
+
+    HttpPut putRequest = null;
+    FileInputStream fis = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      URIBuilder builder = new URIBuilder(getEndPointURI());
+      builder.setPath("/" + volumeName + "/" + bucketName + "/" + keyName)
+          .build();
+
+      putRequest = getHttpPut(builder.toString());
+
+      FileEntity fileEntity = new FileEntity(file, ContentType
+          .APPLICATION_OCTET_STREAM);
+      putRequest.setEntity(fileEntity);
+
+      fis = new FileInputStream(file);
+      putRequest.setHeader(Header.CONTENT_MD5, DigestUtils.md5Hex(fis));
+      OzoneBucket.executePutKey(putRequest, httpClient);
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      IOUtils.closeStream(fis);
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Gets a key from the Ozone server and writes to the file pointed by the
+   * downloadTo Path.
+   *
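+   * <p>For example (a sketch; the names and the local path are placeholders):
+   * <pre>{@code
+   *   client.getKey("vol-0", "bucket-0", "key-0",
+   *       Paths.get("/tmp/key-0.out"));
+   * }</pre>
+   *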
+   * @param volumeName - Volume Name in Ozone.
+   * @param bucketName - Bucket Name in Ozone.
+   * @param keyName - Key Name in Ozone.
+   * @param downloadTo Path of the local file the key's data is written to
+   */
+  public void getKey(String volumeName, String bucketName, String keyName,
+      Path downloadTo) throws OzoneException {
+    OzoneUtils.verifyResourceName(volumeName);
+    OzoneUtils.verifyResourceName(bucketName);
+
+    if (StringUtils.isEmpty(keyName)) {
+      throw new OzoneRestClientException("Invalid key Name");
+    }
+
+    if (downloadTo == null) {
+      throw new OzoneRestClientException("Invalid download path");
+    }
+
+    FileOutputStream outPutFile = null;
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      outPutFile = new FileOutputStream(downloadTo.toFile());
+
+      URIBuilder builder = new URIBuilder(getEndPointURI());
+      builder.setPath("/" + volumeName + "/" + bucketName + "/" + keyName)
+          .build();
+
+      getRequest = getHttpGet(builder.toString());
+      OzoneBucket.executeGetKey(getRequest, httpClient, outPutFile);
+      outPutFile.flush();
+    } catch (IOException | URISyntaxException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      IOUtils.closeStream(outPutFile);
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * List all keys in the given bucket.
+   *
+   * @param volumeName - Volume name
+   * @param bucketName - Bucket name
+   * @param resultLength The maximum number of keys to return.
+   * @param previousKey The key from where listing should start;
+   *                    this key is excluded from the result.
+   * @param prefix The prefix that returned keys must start with.
+   *
+   * @return List of OzoneKeys
+   */
+  public List<OzoneKey> listKeys(String volumeName, String bucketName,
+      String resultLength, String previousKey, String prefix)
+      throws OzoneException {
+    OzoneUtils.verifyResourceName(volumeName);
+    OzoneUtils.verifyResourceName(bucketName);
+
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = HddsClientUtils.newHttpClient()) {
+      URIBuilder builder = new URIBuilder(getEndPointURI());
+      builder.setPath("/" + volumeName + "/" + bucketName).build();
+
+      if (!Strings.isNullOrEmpty(resultLength)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_MAXKEYS, resultLength);
+      }
+
+      if (!Strings.isNullOrEmpty(previousKey)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREVKEY, previousKey);
+      }
+
+      if (!Strings.isNullOrEmpty(prefix)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix);
+      }
+
+      getRequest = getHttpGet(builder.toString());
+      return OzoneBucket.executeListKeys(getRequest, httpClient);
+    } catch (IOException | URISyntaxException e) {
+      throw new OzoneRestClientException(e.getMessage(), e);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Returns a standard HttpPost Object to use for ozone post requests.
+   *
+   * @param onBehalfOf - If the call is being made on behalf of another user,
+   *                   that user's name
+   * @param uriString  - UriString
+   * @return HttpPost
+   */
+  public HttpPost getHttpPost(String onBehalfOf, String uriString) {
+    HttpPost httpPost = new HttpPost(uriString);
+    addOzoneHeaders(httpPost);
+    if (onBehalfOf != null) {
+      httpPost.addHeader(Header.OZONE_USER, onBehalfOf);
+    }
+    return httpPost;
+  }
+
+  /**
+   * Returns a standard HttpGet Object to use for ozone Get requests.
+   *
+   * @param uriString - The full Uri String
+   * @return HttpGet
+   */
+  public HttpGet getHttpGet(String uriString) {
+    HttpGet httpGet = new HttpGet(uriString);
+    addOzoneHeaders(httpGet);
+    return httpGet;
+  }
+
+  /**
+   * Returns httpDelete.
+   *
+   * @param uriString - uri
+   * @return HttpDelete
+   */
+  public HttpDelete getHttpDelete(String uriString) {
+    HttpDelete httpDel = new HttpDelete(uriString);
+    addOzoneHeaders(httpDel);
+    return httpDel;
+  }
+
+  /**
+   * Returns an HttpPut object.
+   *
+   * @param uriString - Uri
+   * @return HttpPut
+   */
+  public HttpPut getHttpPut(String uriString) {
+    HttpPut httpPut = new HttpPut(uriString);
+    addOzoneHeaders(httpPut);
+    return httpPut;
+  }
+
+  /**
+   * Add Ozone Headers.
+   *
+   * @param httpRequest - Http Request
+   */
+  private void addOzoneHeaders(HttpRequestBase httpRequest) {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+
+    httpRequest.addHeader(Header.OZONE_VERSION_HEADER,
+        Header.OZONE_V1_VERSION_HEADER);
+    httpRequest.addHeader(HttpHeaders.DATE,
+        format.format(new Date(Time.monotonicNow())));
+    if (getUserAuth() != null) {
+      httpRequest.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              getUserAuth());
+    }
+  }
+
+  /**
+   * Closes this client and releases any system resources associated with
+   * it. If the client is already closed then invoking this method has no
+   * effect.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+    // TODO : Currently we create a new HTTP client per call. We should
+    // switch to a pooled client and clean up the pool here.
+  }
+
+  @VisibleForTesting
+  public CloseableHttpClient newHttpClient() {
+    return HddsClientUtils.newHttpClient();
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClientException.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClientException.java
new file mode 100644
index 0000000..dfb2357
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneRestClientException.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+
+/**
+ * This exception is thrown by the Ozone Clients.
+ */
+public class OzoneRestClientException extends OzoneException {
+  /**
+   * Constructor that allows the shortMessage.
+   *
+   * @param shortMessage Short Message
+   */
+  public OzoneRestClientException(String shortMessage) {
+    super(0, shortMessage, shortMessage);
+  }
+
+  /**
+   * Constructor that allows a shortMessage and an exception.
+   *
+   * @param shortMessage short message
+   * @param ex exception
+   */
+  public OzoneRestClientException(String shortMessage, Exception ex) {
+    super(0, shortMessage, shortMessage, ex);
+  }
+
+  /**
+   * Constructor that allows the shortMessage and a longer message.
+   *
+   * @param shortMessage Short Message
+   * @param message long error message
+   */
+  public OzoneRestClientException(String shortMessage, String message) {
+    super(0, shortMessage, message);
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneVolume.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneVolume.java
new file mode 100644
index 0000000..6728e68
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/OzoneVolume.java
@@ -0,0 +1,583 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.client;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import static org.apache.hadoop.hdds.server.ServerUtils.releaseConnection;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+
+/**
+ * Ozone Volume Class.
+ */
+public class OzoneVolume {
+  private VolumeInfo volumeInfo;
+  private Map<String, String> headerMap;
+  private final OzoneRestClient client;
+
+  /**
+   * Constructor for OzoneVolume.
+   */
+  public OzoneVolume(OzoneRestClient client) {
+    this.client = client;
+    this.headerMap = new HashMap<>();
+  }
+
+  /**
+   * Constructor for OzoneVolume.
+   *
+   * @param volInfo - volume Info.
+   * @param client  Client
+   */
+  public OzoneVolume(VolumeInfo volInfo, OzoneRestClient client) {
+    this.volumeInfo = volInfo;
+    this.client = client;
+  }
+
+  /**
+   * Returns a JSON string representation of this volume.
+   * @return String
+   * @throws IOException
+   */
+  public String getJsonString() throws IOException {
+    return volumeInfo.toJsonString();
+  }
+
+  /**
+   * Sets the volume info.
+   *
+   * @param volInfoString - Volume Info String
+   */
+  public void setVolumeInfo(String volInfoString) throws IOException {
+    this.volumeInfo = VolumeInfo.parse(volInfoString);
+  }
+
+  /**
+   * @return the volume info.
+   */
+  public VolumeInfo getVolumeInfo() {
+    return this.volumeInfo;
+  }
+
+  /**
+   * Returns volume Name.
+   *
+   * @return Volume Name.
+   */
+  public String getVolumeName() {
+    return this.volumeInfo.getVolumeName();
+  }
+
+  /**
+   * Get created by.
+   *
+   * @return String
+   */
+  public String getCreatedby() {
+    return this.volumeInfo.getCreatedBy();
+  }
+
+  /**
+   * Returns the owner name.
+   *
+   * @return String
+   */
+  public String getOwnerName() {
+    return this.volumeInfo.getOwner().getName();
+  }
+
+  /**
+   * Returns Quota Info.
+   *
+   * @return Quota
+   */
+  public OzoneQuota getQuota() {
+    return volumeInfo.getQuota();
+  }
+
+  /**
+   * Returns creation time of Volume.
+   *
+   * @return String
+   */
+  public String getCreatedOn() {
+    return volumeInfo.getCreatedOn();
+  }
+
+  /**
+   * Returns an HTTP header from the last volume-related call.
+   *
+   * @param headerName - Name of the header
+   * @return - Header Value
+   */
+  public String getHeader(String headerName) {
+    return headerMap.get(headerName);
+  }
+
+  /**
+   * Gets the client; this is used by the bucket and key classes.
+   *
+   * @return - Ozone Client
+   */
+  OzoneRestClient getClient() {
+    return client;
+  }
+
+  /**
+   * Create Bucket - Creates a bucket under a given volume.
+   *
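+   * <p>For example (a sketch; a bucket with the default storage type,
+   * versioning disabled and no extra ACLs):
+   * <pre>{@code
+   *   OzoneBucket bucket = volume.createBucket("bucket-0", null,
+   *       StorageType.DEFAULT, OzoneConsts.Versioning.DISABLED);
+   * }</pre>
+   *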
+   * @param bucketName - Bucket Name
+   * @param acls - User ACLs to apply to the bucket
+   * @param storageType - Storage type for the bucket
+   * @param versioning - enable versioning support on the bucket
+   *
+   * @return - an OzoneBucket object
+   */
+  public OzoneBucket createBucket(String bucketName, String[] acls,
+                                  StorageType storageType,
+                                  OzoneConsts.Versioning versioning)
+      throws OzoneException {
+
+    HttpPost httpPost = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(bucketName);
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName() + "/" + bucketName).build();
+
+      httpPost = client.getHttpPost(null, builder.toString());
+      if (acls != null) {
+        for (String acl : acls) {
+          httpPost
+              .addHeader(Header.OZONE_ACLS, Header.OZONE_ACL_ADD + " " + acl);
+        }
+      }
+
+      httpPost.addHeader(Header.OZONE_STORAGE_TYPE, storageType.toString());
+      httpPost.addHeader(Header.OZONE_BUCKET_VERSIONING, versioning.toString());
+      executeCreateBucket(httpPost, httpClient);
+      return getBucket(bucketName);
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(httpPost);
+    }
+  }
+
+  /**
+   * Create Bucket.
+   *
+   * @param bucketName - bucket name
+   * @param acls - acls
+   * @param storageType - storage class
+   *
+   * @throws OzoneException
+   */
+  public OzoneBucket createBucket(String bucketName, String[] acls,
+                                  StorageType storageType)
+      throws OzoneException {
+    return createBucket(bucketName, acls, storageType,
+        OzoneConsts.Versioning.DISABLED);
+  }
+
+  /**
+   * Create Bucket.
+   *
+   * @param bucketName - bucket name
+   * @param acls - acls
+   *
+   * @throws OzoneException
+   */
+  public OzoneBucket createBucket(String bucketName, String[] acls)
+      throws OzoneException {
+    return createBucket(bucketName, acls, StorageType.DEFAULT,
+        OzoneConsts.Versioning.DISABLED);
+  }
+
+
+  /**
+   * Create Bucket.
+   *
+   * @param bucketName - bucket name
+   *
+   * @throws OzoneException
+   */
+  public OzoneBucket createBucket(String bucketName) throws OzoneException {
+    return createBucket(bucketName, null,  StorageType.DEFAULT,
+        OzoneConsts.Versioning.DISABLED);
+  }
+
+
+  /**
+   * Executes a create bucket request against the Ozone server.
+   *
+   * @param httppost - httpPost
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executeCreateBucket(HttpPost httppost,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(httppost);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+      if ((errorCode == HTTP_OK) || (errorCode == HTTP_CREATED)) {
+        return;
+      }
+
+      if (entity != null) {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      } else {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Adds Acls to an existing bucket.
+   *
+   * @param bucketName - Name of the bucket
+   * @param acls - Acls
+   *
+   * @throws OzoneException
+   */
+  public void addAcls(String bucketName, String[] acls) throws OzoneException {
+    HttpPut putRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(bucketName);
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName() + "/" + bucketName).build();
+      putRequest = client.getHttpPut(builder.toString());
+
+      for (String acl : acls) {
+        putRequest
+            .addHeader(Header.OZONE_ACLS, Header.OZONE_ACL_ADD + " " + acl);
+      }
+      executePutBucket(putRequest, httpClient);
+    } catch (URISyntaxException | IOException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Removes ACLs from a bucket.
+   *
+   * @param bucketName - Bucket Name
+   * @param acls - Acls to be removed
+   *
+   * @throws OzoneException
+   */
+  public void removeAcls(String bucketName, String[] acls)
+      throws OzoneException {
+    HttpPut putRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(bucketName);
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName() + "/" + bucketName).build();
+      putRequest = client.getHttpPut(builder.toString());
+
+      for (String acl : acls) {
+        putRequest
+            .addHeader(Header.OZONE_ACLS, Header.OZONE_ACL_REMOVE + " " + acl);
+      }
+      executePutBucket(putRequest, httpClient);
+    } catch (URISyntaxException | IOException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(putRequest);
+    }
+  }
+
+  /**
+   * Returns information about an existing bucket.
+   *
+   * @param bucketName - BucketName
+   *
+   * @return OzoneBucket
+   */
+  public OzoneBucket getBucket(String bucketName) throws OzoneException {
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(bucketName);
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName() + "/" + bucketName)
+        .setParameter(Header.OZONE_INFO_QUERY_TAG,
+            Header.OZONE_INFO_QUERY_BUCKET).build();
+      getRequest = client.getHttpGet(builder.toString());
+      return executeInfoBucket(getRequest, httpClient);
+
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+
+  /**
+   * Execute the info bucket call.
+   *
+   * @param getRequest - httpGet Request
+   * @param httpClient - Http Client
+   *
+   * @return OzoneBucket
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private OzoneBucket executeInfoBucket(HttpGet getRequest,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(getRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+      if ((errorCode == HTTP_OK) || (errorCode == HTTP_CREATED)) {
+        OzoneBucket bucket =
+            new OzoneBucket(BucketInfo.parse(EntityUtils.toString(entity)),
+                this);
+        return bucket;
+      }
+      throw OzoneException.parse(EntityUtils.toString(entity));
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Execute the put bucket call.
+   *
+   * @param putRequest - http put request
+   * @param httpClient - Http Client
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executePutBucket(HttpPut putRequest,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(putRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (errorCode == HTTP_OK) {
+        return;
+      }
+
+      if (entity != null) {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+
+      throw new OzoneRestClientException("Unexpected null in http result");
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Gets a list of buckets on this volume.
+   *
+   * @return - List of buckets
+   *
+   * @throws OzoneException
+   */
+  public List<OzoneBucket> listBuckets(String resultLength,
+      String previousBucket, String prefix) throws OzoneException {
+    HttpGet getRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName()).build();
+      if (!Strings.isNullOrEmpty(resultLength)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_MAXKEYS, resultLength);
+      }
+      if (!Strings.isNullOrEmpty(previousBucket)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREVKEY, previousBucket);
+      }
+      if (!Strings.isNullOrEmpty(prefix)) {
+        builder.addParameter(Header.OZONE_LIST_QUERY_PREFIX, prefix);
+      }
+
+      getRequest = client.getHttpGet(builder.toString());
+      return executeListBuckets(getRequest, httpClient);
+
+    } catch (IOException | URISyntaxException e) {
+      throw new OzoneRestClientException(e.getMessage(), e);
+    } finally {
+      releaseConnection(getRequest);
+    }
+  }
+
+  /**
+   * Executes the list buckets call.
+   *
+   * @param getRequest - http Request
+   * @param httpClient - http Client
+   *
+   * @return List of OzoneBuckets
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private List<OzoneBucket> executeListBuckets(HttpGet getRequest,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    List<OzoneBucket> ozoneBucketList = new LinkedList<OzoneBucket>();
+    try {
+      HttpResponse response = httpClient.execute(getRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+
+      entity = response.getEntity();
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload");
+      }
+      if (errorCode == HTTP_OK) {
+        ListBuckets bucketList =
+            ListBuckets.parse(EntityUtils.toString(entity));
+
+        for (BucketInfo info : bucketList.getBuckets()) {
+          ozoneBucketList.add(new OzoneBucket(info, this));
+        }
+        return ozoneBucketList;
+
+      } else {
+        throw OzoneException.parse(EntityUtils.toString(entity));
+      }
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  /**
+   * Delete an empty bucket.
+   *
+   * @param bucketName - Name of the bucket to delete
+   *
+   * @throws OzoneException
+   */
+  public void deleteBucket(String bucketName) throws OzoneException {
+    HttpDelete delRequest = null;
+    try (CloseableHttpClient httpClient = newHttpClient()) {
+      OzoneUtils.verifyResourceName(bucketName);
+      URIBuilder builder = new URIBuilder(getClient().getEndPointURI());
+      builder.setPath("/" + getVolumeName() + "/" + bucketName).build();
+
+      delRequest = client.getHttpDelete(builder.toString());
+      executeDeleteBucket(delRequest, httpClient);
+
+    } catch (IOException | URISyntaxException | IllegalArgumentException ex) {
+      throw new OzoneRestClientException(ex.getMessage(), ex);
+    } finally {
+      releaseConnection(delRequest);
+    }
+  }
+
+  /**
+   * Executes delete bucket call.
+   *
+   * @param delRequest - Delete Request
+   * @param httpClient - Http Client
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  private void executeDeleteBucket(HttpDelete delRequest,
+      CloseableHttpClient httpClient)
+      throws IOException, OzoneException {
+    HttpEntity entity = null;
+    try {
+      HttpResponse response = httpClient.execute(delRequest);
+      int errorCode = response.getStatusLine().getStatusCode();
+      entity = response.getEntity();
+
+      if (errorCode == HTTP_OK) {
+        return;
+      }
+
+      if (entity == null) {
+        throw new OzoneRestClientException("Unexpected null in http payload.");
+      }
+
+      throw OzoneException.parse(EntityUtils.toString(entity));
+
+    } finally {
+      if (entity != null) {
+        EntityUtils.consumeQuietly(entity);
+      }
+    }
+  }
+
+  @VisibleForTesting
+  public CloseableHttpClient newHttpClient() {
+    return HddsClientUtils.newHttpClient();
+  }
+}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/package-info.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/package-info.java
new file mode 100644
index 0000000..046568b
--- /dev/null
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/web/client/package-info.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+
+/**
+ * Ozone client library is a Java client for the Ozone
+ * Object Store.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+/**
+ This library is a simple Ozone REST library.
+
+ It is a very *minimal* client written for tests and command-line
+ utilities that work against Ozone. It does not yet provide thread
+ pools or support for extended security models.
+
+ OzoneClients return OzoneVolumes and OzoneVolumes return OzoneBuckets.
+ **/
diff --git a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/TestHddsClientUtils.java b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/TestHddsClientUtils.java
new file mode 100644
index 0000000..a270f61
--- /dev/null
+++ b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/TestHddsClientUtils.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.Timeout;
+
+import java.net.InetSocketAddress;
+
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
+import static org.apache.hadoop.ozone.KsmUtils.getKsmAddress;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * This test class verifies the parsing of SCM endpoint config settings. The
+ * parsing logic is in {@link org.apache.hadoop.hdds.scm.client.HddsClientUtils}.
+ */
+public class TestHddsClientUtils {
+  @Rule
+  public Timeout timeout = new Timeout(300000);
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  /**
+   * Verify client endpoint lookup failure if it is not configured.
+   */
+  @Test
+  public void testMissingScmClientAddress() {
+    final Configuration conf = new OzoneConfiguration();
+    thrown.expect(IllegalArgumentException.class);
+    getScmAddressForClients(conf);
+  }
+
+  /**
+   * Verify that the client endpoint can be correctly parsed from
+   * configuration.
+   */
+  @Test
+  public void testGetScmClientAddress() {
+    final Configuration conf = new OzoneConfiguration();
+
+    // First try a client address with just a host name. Verify it falls
+    // back to the default port.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4");
+    InetSocketAddress addr = getScmAddressForClients(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT));
+
+    // Next try a client address with a host name and port. Verify both
+    // are used correctly.
+    conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "1.2.3.4:100");
+    addr = getScmAddressForClients(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(100));
+  }
+
+  @Test
+  public void testGetKSMAddress() {
+    final Configuration conf = new OzoneConfiguration();
+
+    // First try a client address with just a host name. Verify it falls
+    // back to the default port.
+    conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "1.2.3.4");
+    InetSocketAddress addr = getKsmAddress(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(KSMConfigKeys.OZONE_KSM_PORT_DEFAULT));
+
+    // Next try a client address with a host name and port. Verify both
+    // are used correctly.
+    conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "1.2.3.4:100");
+    addr = getKsmAddress(conf);
+    assertThat(addr.getHostString(), is("1.2.3.4"));
+    assertThat(addr.getPort(), is(100));
+
+    // Assert that the default bind host and port are used when the
+    // configured value is empty.
+    conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "");
+    addr = getKsmAddress(conf);
+    assertThat(addr.getHostString(), is("0.0.0.0"));
+    assertThat(addr.getPort(), is(KSMConfigKeys.OZONE_KSM_PORT_DEFAULT));
+  }
+}
diff --git a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/package-info.java b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/package-info.java
new file mode 100644
index 0000000..be63eab
--- /dev/null
+++ b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client;
+
+/**
+ * This package contains test classes for Ozone Client.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/dev-support/findbugsExcludeFile.xml b/hadoop-ozone/common/dev-support/findbugsExcludeFile.xml
new file mode 100644
index 0000000..df58f36
--- /dev/null
+++ b/hadoop-ozone/common/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <Package name="org.apache.hadoop.ozone.protocol.proto"/>
+  </Match>
+</FindBugsFilter>
diff --git a/hadoop-ozone/common/pom.xml b/hadoop-ozone/common/pom.xml
new file mode 100644
index 0000000..6ddba0f
--- /dev/null
+++ b/hadoop-ozone/common/pom.xml
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-common</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Common libraries</description>
+  <name>Apache Hadoop Ozone Common</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>ozone</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <goals>
+              <goal>protoc</goal>
+            </goals>
+            <configuration>
+              <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
+              <imports>
+                <param>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
+                </param>
+                <param>
+                  ${basedir}/../../hadoop-hdds/common/src/main/proto/
+                </param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>KeySpaceManagerProtocol.proto</include>
+                </includes>
+              </source>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/bin/ozone b/hadoop-ozone/common/src/main/bin/ozone
new file mode 100755
index 0000000..7419743
--- /dev/null
+++ b/hadoop-ozone/common/src/main/bin/ozone
@@ -0,0 +1,188 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The name of the script being executed.
+HADOOP_SHELL_EXECNAME="ozone"
+MYNAME="${BASH_SOURCE-$0}"
+
+## @description  build up the ozone command's usage text.
+## @audience     public
+## @stability    stable
+## @replaceable  no
+function hadoop_usage
+{
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
+  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in worker mode"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+  hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
+  hadoop_add_option "--workers" "turn on worker mode"
+
+
+  hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
+  hadoop_add_subcommand "datanode" daemon "run a DFS datanode"
+  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
+  hadoop_add_subcommand "freon" client "runs an ozone data generator"
+  hadoop_add_subcommand "genesis" client "runs a collection of ozone benchmarks to help with tuning."
+  hadoop_add_subcommand "getozoneconf" client "get ozone config values from
+  configuration"
+  hadoop_add_subcommand "jmxget" admin "get JMX exported values from NameNode or DataNode."
+  hadoop_add_subcommand "ksm" daemon "Ozone keyspace manager"
+  hadoop_add_subcommand "o3" client "command line interface for ozone"
+  hadoop_add_subcommand "noz" client "ozone debug tool, convert ozone metadata into relational data"
+  hadoop_add_subcommand "scm" daemon "run the Storage Container Manager service"
+  hadoop_add_subcommand "scmcli" client "run the CLI of the Storage Container Manager "
+  hadoop_add_subcommand "version" client "print the version"
+
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
+}
+
+## @description  Default command handler for the ozone command
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function ozonecmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+    classpath)
+      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
+    ;;
+    datanode)
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+      HADOOP_SECURE_CLASSNAME="org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter"
+      HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.datanode.DataNode'
+      hadoop_deprecate_envvar HADOOP_SECURE_DN_PID_DIR HADOOP_SECURE_PID_DIR
+      hadoop_deprecate_envvar HADOOP_SECURE_DN_LOG_DIR HADOOP_SECURE_LOG_DIR
+    ;;
+    envvars)
+      echo "JAVA_HOME='${JAVA_HOME}'"
+      echo "HADOOP_HDFS_HOME='${HADOOP_HDFS_HOME}'"
+      echo "HDFS_DIR='${HDFS_DIR}'"
+      echo "HDFS_LIB_JARS_DIR='${HDFS_LIB_JARS_DIR}'"
+      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
+      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+      if [[ -n "${QATESTMODE}" ]]; then
+        echo "MYNAME=${MYNAME}"
+        echo "HADOOP_SHELL_EXECNAME=${HADOOP_SHELL_EXECNAME}"
+      fi
+      exit 0
+    ;;
+    freon)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.freon.Freon
+    ;;
+    genesis)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.genesis.Genesis
+    ;;
+    getozoneconf)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.freon.OzoneGetConf;
+    ;;
+    ksm)
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.ksm.KeySpaceManager
+    ;;
+    oz)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.web.ozShell.Shell
+    ;;
+    noz)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.scm.cli.SQLCLI
+    ;;
+    scm)
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+      HADOOP_CLASSNAME='org.apache.hadoop.hdds.scm.StorageContainerManager'
+      hadoop_debug "Appending HDFS_STORAGECONTAINERMANAGER_OPTS onto HADOOP_OPTS"
+      HADOOP_OPTS="${HADOOP_OPTS} ${HDFS_STORAGECONTAINERMANAGER_OPTS}"
+    ;;
+    scmcli)
+      HADOOP_CLASSNAME=org.apache.hadoop.ozone.scm.cli.SCMCLI
+    ;;
+    version)
+      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
+    ;;
+    *)
+      HADOOP_CLASSNAME="${subcmd}"
+      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
+        hadoop_exit_with_usage 1
+      fi
+    ;;
+  esac
+}
+
+# let's locate libexec...
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
+else
+  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
+  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
+fi
+
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
+  # shellcheck source=./hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
+  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 2>&1
+  exit 1
+fi
+
+# now that we have support code, let's abs MYNAME so we can use it later
+MYNAME=$(hadoop_abs "${MYNAME}")
+
+if [[ $# = 0 ]]; then
+  hadoop_exit_with_usage 1
+fi
+
+HADOOP_SUBCMD=$1
+shift
+
+if hadoop_need_reexec ozone "${HADOOP_SUBCMD}"; then
+  hadoop_uservar_su ozone "${HADOOP_SUBCMD}" \
+    "${MYNAME}" \
+    "--reexec" \
+    "${HADOOP_USER_PARAMS[@]}"
+  exit $?
+fi
+
+hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
+
+HADOOP_SUBCMD_ARGS=("$@")
+
+if declare -f ozone_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
+  hadoop_debug "Calling dynamically: ozone_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
+  "ozone_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
+else
+  ozonecmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
+fi
+
+hadoop_add_client_opts
+
+if [[ ${HADOOP_WORKER_MODE} = true ]]; then
+  hadoop_common_worker_mode_execute "${HADOOP_HDFS_HOME}/bin/ozone" "${HADOOP_USER_PARAMS[@]}"
+  exit $?
+fi
+
+hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"
+
+# everything is in globals at this point, so call the generic handler
+hadoop_generic_java_subcmd_handler
diff --git a/hadoop-ozone/common/src/main/bin/start-ozone.sh b/hadoop-ozone/common/src/main/bin/start-ozone.sh
new file mode 100644
index 0000000..dda0a1c
--- /dev/null
+++ b/hadoop-ozone/common/src/main/bin/start-ozone.sh
@@ -0,0 +1,105 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Start hadoop hdfs and ozone daemons.
+# Run this on master node.
+## @description  usage info
+## @audience     private
+## @stability    evolving
+## @replaceable  no
+function hadoop_usage
+{
+  echo "Usage: start-ozone.sh"
+}
+
+this="${BASH_SOURCE-$0}"
+bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+
+# let's locate libexec...
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
+else
+  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
+fi
+
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
+  # shellcheck disable=SC1090
+  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 2>&1
+  exit 1
+fi
+
+SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-)
+SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-)
+
+if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then
+  echo "Ozone is not supported in a security enabled cluster."
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Check if ozone is enabled
+OZONE_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey ozone.enabled | tr '[:upper:]' '[:lower:]' 2>&-)
+if [[ "${OZONE_ENABLED}" != "true" ]]; then
+  echo "Operation is not supported because ozone is not enabled."
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Start hdfs before starting ozone daemons
+if [[ -f "${bin}/start-dfs.sh" ]]; then
+  "${bin}/start-dfs.sh"
+else
+  echo "ERROR: Cannot execute ${bin}/start-dfs.sh." 2>&1
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Ozone keyspacemanager nodes
+KSM_NODES=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -keyspacemanagers 2>/dev/null)
+echo "Starting key space manager nodes [${KSM_NODES}]"
+if [[ "${KSM_NODES}" == "0.0.0.0" ]]; then
+  KSM_NODES=$(hostname)
+fi
+
+hadoop_uservar_su hdfs ksm "${HADOOP_HDFS_HOME}/bin/ozone" \
+  --workers \
+  --config "${HADOOP_CONF_DIR}" \
+  --hostnames "${KSM_NODES}" \
+  --daemon start \
+  ksm
+
+HADOOP_JUMBO_RETCOUNTER=$?
+
+#---------------------------------------------------------
+# Ozone storagecontainermanager nodes
+SCM_NODES=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -storagecontainermanagers 2>/dev/null)
+echo "Starting storage container manager nodes [${SCM_NODES}]"
+hadoop_uservar_su hdfs scm "${HADOOP_HDFS_HOME}/bin/ozone" \
+  --workers \
+  --config "${HADOOP_CONF_DIR}" \
+  --hostnames "${SCM_NODES}" \
+  --daemon start \
+  scm
+
+(( HADOOP_JUMBO_RETCOUNTER=HADOOP_JUMBO_RETCOUNTER + $? ))
+
+exit ${HADOOP_JUMBO_RETCOUNTER}
diff --git a/hadoop-ozone/common/src/main/bin/stop-ozone.sh b/hadoop-ozone/common/src/main/bin/stop-ozone.sh
new file mode 100644
index 0000000..be55be4
--- /dev/null
+++ b/hadoop-ozone/common/src/main/bin/stop-ozone.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Stop hdfs and ozone daemons.
+# Run this on master node.
+## @description  usage info
+## @audience     private
+## @stability    evolving
+## @replaceable  no
+function hadoop_usage
+{
+  echo "Usage: stop-ozone.sh"
+}
+
+this="${BASH_SOURCE-$0}"
+bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+
+# let's locate libexec...
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
+else
+  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
+fi
+
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
+  # shellcheck disable=SC1090
+  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 2>&1
+  exit 1
+fi
+
+SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-)
+SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-)
+
+if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then
+  echo "Ozone is not supported in a security enabled cluster."
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Check if ozone is enabled
+OZONE_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey ozone.enabled | tr '[:upper:]' '[:lower:]' 2>&-)
+if [[ "${OZONE_ENABLED}" != "true" ]]; then
+  echo "Operation is not supported because ozone is not enabled."
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Stop hdfs before stopping ozone daemons
+if [[ -f "${bin}/stop-dfs.sh" ]]; then
+  "${bin}/stop-dfs.sh"
+else
+  echo "ERROR: Cannot execute ${bin}/stop-dfs.sh." 2>&1
+  exit 1
+fi
+
+#---------------------------------------------------------
+# Ozone keyspacemanager nodes
+KSM_NODES=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -keyspacemanagers 2>/dev/null)
+echo "Stopping key space manager nodes [${KSM_NODES}]"
+if [[ "${KSM_NODES}" == "0.0.0.0" ]]; then
+  KSM_NODES=$(hostname)
+fi
+
+hadoop_uservar_su hdfs ksm "${HADOOP_HDFS_HOME}/bin/ozone" \
+  --workers \
+  --config "${HADOOP_CONF_DIR}" \
+  --hostnames "${KSM_NODES}" \
+  --daemon stop \
+  ksm
+
+#---------------------------------------------------------
+# Ozone storagecontainermanager nodes
+SCM_NODES=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -storagecontainermanagers 2>/dev/null)
+echo "Stopping storage container manager nodes [${SCM_NODES}]"
+hadoop_uservar_su hdfs scm "${HADOOP_HDFS_HOME}/bin/ozone" \
+  --workers \
+  --config "${HADOOP_CONF_DIR}" \
+  --hostnames "${SCM_NODES}" \
+  --daemon stop \
+  scm
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/conf/ozone-site.xml b/hadoop-ozone/common/src/main/conf/ozone-site.xml
new file mode 100644
index 0000000..77dd7ef
--- /dev/null
+++ b/hadoop-ozone/common/src/main/conf/ozone-site.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+</configuration>
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/KsmUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/KsmUtils.java
new file mode 100644
index 0000000..ebada1c
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/KsmUtils.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+
+import com.google.common.base.Optional;
+import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
+import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KSM_ADDRESS_KEY;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_BIND_HOST_DEFAULT;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KSM_PORT_DEFAULT;
+
+/**
+ * Stateless helper functions for the server and client side of KSM
+ * communication.
+ */
+public final class KsmUtils {
+
+  private KsmUtils() {
+  }
+
+  /**
+   * Retrieve the socket address that is used by KSM.
+   * @param conf - Configuration
+   * @return Target InetSocketAddress for the KSM service endpoint.
+   */
+  public static InetSocketAddress getKsmAddress(
+      Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        OZONE_KSM_ADDRESS_KEY);
+
+    // If no port number is specified then we'll just try the defaultBindPort.
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        OZONE_KSM_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(
+        host.or(OZONE_KSM_BIND_HOST_DEFAULT) + ":" +
+            port.or(OZONE_KSM_PORT_DEFAULT));
+  }
+
+  /**
+   * Retrieve the socket address that should be used by clients to connect
+   * to KSM.
+   * @param conf - Configuration
+   * @return Target InetSocketAddress for the KSM service endpoint.
+   */
+  public static InetSocketAddress getKsmAddressForClients(
+      Configuration conf) {
+    final Optional<String> host = getHostNameFromConfigKeys(conf,
+        OZONE_KSM_ADDRESS_KEY);
+
+    if (!host.isPresent()) {
+      throw new IllegalArgumentException(
+          OZONE_KSM_ADDRESS_KEY + " must be defined. See" +
+              " https://wiki.apache.org/hadoop/Ozone#Configuration for" +
+              " details on configuring Ozone.");
+    }
+
+    // If no port number is specified then we'll just try the defaultBindPort.
+    final Optional<Integer> port = getPortNumberFromConfigKeys(conf,
+        OZONE_KSM_ADDRESS_KEY);
+
+    return NetUtils.createSocketAddr(
+        host.get() + ":" + port.or(OZONE_KSM_PORT_DEFAULT));
+  }
+
+}
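
A small sketch of how the helpers above resolve the KSM endpoint; it uses only the keys and methods shown in this file, and the host and port values are illustrative:

    import java.net.InetSocketAddress;

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.ozone.KsmUtils;
    import org.apache.hadoop.ozone.ksm.KSMConfigKeys;

    public final class KsmAddressSketch {
      private KsmAddressSketch() { }

      public static void main(String[] args) {
        OzoneConfiguration conf = new OzoneConfiguration();

        // Host only: the default KSM port is appended.
        conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "ksm.example.com");
        InetSocketAddress addr = KsmUtils.getKsmAddress(conf);
        System.out.println(addr); // ksm.example.com:<default port>

        // Host and port: both are honoured.
        conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "ksm.example.com:9862");
        System.out.println(KsmUtils.getKsmAddress(conf));

        // getKsmAddressForClients(conf) behaves the same way, but fails fast
        // with an IllegalArgumentException when no host is configured.
      }
    }
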
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/LengthInputStream.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/LengthInputStream.java
new file mode 100644
index 0000000..baf1887
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/LengthInputStream.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.io;
+
+import java.io.FilterInputStream;
+import java.io.InputStream;
+
+/**
+ * An input stream with length.
+ */
+public class LengthInputStream extends FilterInputStream {
+
+  private final long length;
+
+  /**
+   * Create a stream.
+   * @param in the underlying input stream.
+   * @param length the length of the stream.
+   */
+  public LengthInputStream(InputStream in, long length) {
+    super(in);
+    this.length = length;
+  }
+
+  /** @return the length. */
+  public long getLength() {
+    return length;
+  }
+
+  public InputStream getWrappedStream() {
+    return in;
+  }
+}
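
A minimal usage sketch for the wrapper above, pairing an in-memory stream with its known length:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.ozone.client.io.LengthInputStream;

    public final class LengthInputStreamSketch {
      private LengthInputStreamSketch() { }

      public static void main(String[] args) throws IOException {
        byte[] data = "hello ozone".getBytes(StandardCharsets.UTF_8);

        // Wrap the stream together with its length so callers that only see
        // an InputStream can still learn how many bytes to expect.
        try (LengthInputStream in =
            new LengthInputStream(new ByteArrayInputStream(data), data.length)) {
          System.out.println("length = " + in.getLength());
          System.out.println("first byte = " + in.read());
        }
      }
    }
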
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java
new file mode 100644
index 0000000..ece1ff4
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/io/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.io;
+
+/**
+ * IO related ozone helper classes.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneException.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneException.java
new file mode 100644
index 0000000..953e399
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/OzoneException.java
@@ -0,0 +1,267 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+
+/**
+ * Class that represents various errors returned by the
+ * Ozone layer.
+ */
+@InterfaceAudience.Private
+public class OzoneException extends Exception {
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(OzoneException.class);
+  private static final ObjectMapper MAPPER;
+
+  static {
+    MAPPER = new ObjectMapper();
+    MAPPER.setVisibility(
+        MAPPER.getSerializationConfig().getDefaultVisibilityChecker()
+            .withCreatorVisibility(JsonAutoDetect.Visibility.NONE)
+            .withFieldVisibility(JsonAutoDetect.Visibility.NONE)
+            .withGetterVisibility(JsonAutoDetect.Visibility.NONE)
+            .withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)
+            .withSetterVisibility(JsonAutoDetect.Visibility.NONE));
+  }
+
+  @JsonProperty("httpCode")
+  private long httpCode;
+  @JsonProperty("shortMessage")
+  private String shortMessage;
+  @JsonProperty("resource")
+  private String resource;
+  @JsonProperty("message")
+  private String message;
+  @JsonProperty("requestID")
+  private String requestId;
+  @JsonProperty("hostName")
+  private String hostID;
+
+  /**
+   * Constructs a new exception with {@code null} as its detail message. The
+   * cause is not initialized, and may subsequently be initialized by a call
+   * to {@link #initCause}.
+   *
+   * This constructor is needed by Json Serializer.
+   */
+  public OzoneException() {
+  }
+
+
+  /**
+   * Constructor that allows a shortMessage and exception.
+   *
+   * @param httpCode Error Code
+   * @param shortMessage Short Message
+   * @param ex Exception
+   */
+  public OzoneException(long httpCode, String shortMessage, Exception ex) {
+    super(ex);
+    this.message = ex.getMessage();
+    this.shortMessage = shortMessage;
+    this.httpCode = httpCode;
+  }
+
+
+  /**
+   * Constructor that allows a shortMessage.
+   *
+   * @param httpCode Error Code
+   * @param shortMessage Short Message
+   */
+  public OzoneException(long httpCode, String shortMessage) {
+    this.shortMessage = shortMessage;
+    this.httpCode = httpCode;
+  }
+
+  /**
+   * Constructor that allows a shortMessage and long message.
+   *
+   * @param httpCode Error Code
+   * @param shortMessage Short Message
+   * @param message long error message
+   */
+  public OzoneException(long httpCode, String shortMessage, String message) {
+    this.shortMessage = shortMessage;
+    this.message = message;
+    this.httpCode = httpCode;
+  }
+
+  /**
+   * Constructor that allows a shortMessage, a long message and an exception.
+   *
+   * @param httpCode Error code
+   * @param shortMessage Short message
+   * @param message Long error message
+   * @param ex Exception
+   */
+  public OzoneException(long httpCode, String shortMessage,
+      String message, Exception ex) {
+    super(ex);
+    this.shortMessage = shortMessage;
+    this.message = message;
+    this.httpCode = httpCode;
+  }
+
+  /**
+   * Returns the Resource that was involved in the error.
+   *
+   * @return String
+   */
+  public String getResource() {
+    return resource;
+  }
+
+  /**
+   * Sets Resource.
+   *
+   * @param resourceName - Name of the Resource
+   */
+  public void setResource(String resourceName) {
+    this.resource = resourceName;
+  }
+
+  /**
+   * Gets a detailed message for the error.
+   *
+   * @return String
+   */
+  public String getMessage() {
+    return message;
+  }
+
+  /**
+   * Sets the error message.
+   *
+   * @param longMessage - Long message
+   */
+  public void setMessage(String longMessage) {
+    this.message = longMessage;
+  }
+
+  /**
+   * Returns request Id.
+   *
+   * @return String
+   */
+  public String getRequestId() {
+    return requestId;
+  }
+
+  /**
+   * Sets request ID.
+   *
+   * @param ozoneRequestId Request ID generated by the Server
+   */
+  public void setRequestId(String ozoneRequestId) {
+    this.requestId = ozoneRequestId;
+  }
+
+  /**
+   * Returns short error string.
+   *
+   * @return String
+   */
+  public String getShortMessage() {
+    return shortMessage;
+  }
+
+  /**
+   * Sets short error string.
+   *
+   * @param shortError Short Error Code
+   */
+  public void setShortMessage(String shortError) {
+    this.shortMessage = shortError;
+  }
+
+  /**
+   * Returns hostID.
+   *
+   * @return String
+   */
+  public String getHostID() {
+    return hostID;
+  }
+
+  /**
+   * Sets host ID.
+   *
+   * @param hostName host Name
+   */
+  public void setHostID(String hostName) {
+    this.hostID = hostName;
+  }
+
+  /**
+   * Returns http error code.
+   *
+   * @return long
+   */
+  public long getHttpCode() {
+    return httpCode;
+  }
+
+  /**
+   * Sets http status.
+   *
+   * @param httpStatus http error code.
+   */
+  public void setHttpCode(long httpStatus) {
+    this.httpCode = httpStatus;
+  }
+
+  /**
+   * Returns a Json String.
+   *
+   * @return JSON representation of the Error
+   */
+  public String toJsonString() {
+    try {
+      return MAPPER.writeValueAsString(this);
+    } catch (IOException ex) {
+      // TODO : Log this error on server side.
+    }
+    // TODO : Replace this with a JSON Object -- That represents this error.
+    return "500 Internal Server Error";
+  }
+
+  /**
+   * Parses an Exception record.
+   *
+   * @param jsonString - Exception in Json format.
+   *
+   * @return OzoneException Object
+   *
+   * @throws IOException
+   */
+  public static OzoneException parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+}
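
A short round-trip sketch using only the constructors and methods defined above; the error values are illustrative:

    import java.io.IOException;

    import org.apache.hadoop.ozone.client.rest.OzoneException;

    public final class OzoneExceptionSketch {
      private OzoneExceptionSketch() { }

      public static void main(String[] args) throws IOException {
        // Server side: build an error and serialize it as the HTTP payload.
        OzoneException error =
            new OzoneException(404, "BucketNotFound", "bucket-one does not exist");
        error.setRequestId("req-0001");
        error.setResource("/volume-one/bucket-one");
        String payload = error.toJsonString();

        // Client side: parse the payload back into an OzoneException, much as
        // the REST client code earlier in this patch does for non-200 responses.
        OzoneException parsed = OzoneException.parse(payload);
        System.out.println(parsed.getHttpCode() + " " + parsed.getShortMessage());
      }
    }
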
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/Header.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/Header.java
new file mode 100644
index 0000000..00d48576
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/Header.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest.headers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * OZONE specific HTTP headers.
+ */
+@InterfaceAudience.Private
+public final class Header {
+  public static final String OZONE_QUOTA_BYTES = "BYTES";
+  public static final String OZONE_QUOTA_MB = "MB";
+  public static final String OZONE_QUOTA_GB = "GB";
+  public static final String OZONE_QUOTA_TB = "TB";
+  public static final String OZONE_QUOTA_REMOVE = "remove";
+  public static final String OZONE_QUOTA_UNDEFINED = "undefined";
+  public static final String OZONE_EMPTY_STRING = "";
+  public static final String OZONE_DEFAULT_LIST_SIZE = "1000";
+
+  public static final String OZONE_USER = "x-ozone-user";
+  public static final String OZONE_SIMPLE_AUTHENTICATION_SCHEME = "OZONE";
+  public static final String OZONE_VERSION_HEADER = "x-ozone-version";
+  public static final String OZONE_V1_VERSION_HEADER = "v1";
+
+  public static final String OZONE_LIST_QUERY_SERVICE = "service";
+
+  public static final String OZONE_INFO_QUERY_VOLUME = "volume";
+  public static final String OZONE_INFO_QUERY_BUCKET = "bucket";
+  public static final String OZONE_INFO_QUERY_KEY = "key";
+
+  public static final String OZONE_REQUEST_ID = "x-ozone-request-id";
+  public static final String OZONE_SERVER_NAME = "x-ozone-server-name";
+
+  public static final String OZONE_STORAGE_TYPE = "x-ozone-storage-type";
+
+  public static final String OZONE_BUCKET_VERSIONING =
+      "x-ozone-bucket-versioning";
+
+  public static final String OZONE_ACLS = "x-ozone-acls";
+  public static final String OZONE_ACL_ADD = "ADD";
+  public static final String OZONE_ACL_REMOVE = "REMOVE";
+
+  public static final String OZONE_INFO_QUERY_TAG = "info";
+  public static final String OZONE_QUOTA_QUERY_TAG = "quota";
+  public static final String CONTENT_MD5 = "Content-MD5";
+  public static final String OZONE_LIST_QUERY_PREFIX = "prefix";
+  public static final String OZONE_LIST_QUERY_MAXKEYS = "max-keys";
+  public static final String OZONE_LIST_QUERY_PREVKEY = "prev-key";
+  public static final String OZONE_LIST_QUERY_ROOTSCAN = "root-scan";
+
+  private Header() {
+    // Never constructed.
+  }
+}
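
A small sketch of how these constants might be attached to an outgoing REST request with Apache HttpClient (already used by the client code earlier in this patch); the endpoint and user name are placeholders:

    import org.apache.http.client.methods.HttpGet;

    import org.apache.hadoop.ozone.client.rest.headers.Header;

    public final class HeaderSketch {
      private HeaderSketch() { }

      public static HttpGet volumeInfoRequest(String endpoint, String user) {
        // e.g. endpoint = "http://localhost:9864/volume-one", user = "ozone"
        HttpGet get = new HttpGet(endpoint);
        // Declare the protocol version and the acting user via Ozone headers.
        get.addHeader(Header.OZONE_VERSION_HEADER, Header.OZONE_V1_VERSION_HEADER);
        get.addHeader(Header.OZONE_USER, user);
        return get;
      }
    }
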
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java
new file mode 100644
index 0000000..76bc206
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/headers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.rest.headers;
+
+/**
+ * Ozone HTTP Header utility.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java
new file mode 100644
index 0000000..fc86dbb
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.rest;
+
+/**
+ * Ozone REST interface.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/BucketInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/BucketInfo.java
new file mode 100644
index 0000000..af89b39
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/BucketInfo.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.client.rest.response;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneConsts;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.google.common.base.Preconditions;
+
+/**
+ * BucketInfo class is used for parsing the JSON response
+ * when a BucketInfo call is made.
+ */
+public class BucketInfo implements Comparable<BucketInfo> {
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(BucketInfo.class);
+
+  private String volumeName;
+  private String bucketName;
+  private String createdOn;
+  private List<OzoneAcl> acls;
+  private OzoneConsts.Versioning versioning;
+  private StorageType storageType;
+
+  /**
+   * Constructor for BucketInfo.
+   *
+   * @param volumeName - Name of the volume
+   * @param bucketName - Name of the bucket
+   */
+  public BucketInfo(String volumeName, String bucketName) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+  }
+
+
+  /**
+   * Default constructor for BucketInfo.
+   */
+  public BucketInfo() {
+    acls = new LinkedList<>();
+  }
+
+  /**
+   * Parse a JSON string into BucketInfo Object.
+   *
+   * @param jsonString Json String
+   * @return BucketInfo
+   * @throws IOException
+   */
+  public static BucketInfo parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+  /**
+   * Returns a List of ACLs set on the Bucket.
+   *
+   * @return List of Acl
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Sets ACls.
+   *
+   * @param acls Acl list
+   */
+  public void setAcls(List<OzoneAcl> acls) {
+    this.acls = acls;
+  }
+
+  /**
+   * Returns Storage Type info.
+   *
+   * @return Storage Type of the bucket
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Sets the Storage Type.
+   *
+   * @param storageType Storage Type
+   */
+  public void setStorageType(StorageType storageType) {
+    this.storageType = storageType;
+  }
+
+  /**
+   * Returns versioning.
+   *
+   * @return versioning Enum
+   */
+  public OzoneConsts.Versioning getVersioning() {
+    return versioning;
+  }
+
+  /**
+   * Sets Versioning.
+   *
+   * @param versioning - Versioning setting of the bucket
+   */
+  public void setVersioning(OzoneConsts.Versioning versioning) {
+    this.versioning = versioning;
+  }
+
+
+  /**
+   * Gets bucket Name.
+   *
+   * @return String
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Sets bucket Name.
+   *
+   * @param bucketName Name of the bucket
+   */
+  public void setBucketName(String bucketName) {
+    this.bucketName = bucketName;
+  }
+
+  /**
+   * Sets creation time of the bucket.
+   *
+   * @param creationTime Date String
+   */
+  public void setCreatedOn(String creationTime) {
+    this.createdOn = creationTime;
+  }
+
+  /**
+   * Returns creation time.
+   *
+   * @return creation time of bucket.
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * Returns Volume Name.
+   *
+   * @return String volume name
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Sets the Volume Name of bucket.
+   *
+   * @param volumeName volumeName
+   */
+  public void setVolumeName(String volumeName) {
+    this.volumeName = volumeName;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * Please note : BucketInfo compare functions are used only within the
+   * context of a volume, hence volume name is purposefully ignored in
+   * compareTo, equal and hashcode functions of this class.
+   */
+  @Override
+  public int compareTo(BucketInfo o) {
+    Preconditions.checkState(o.getVolumeName().equals(this.getVolumeName()));
+    return this.bucketName.compareTo(o.getBucketName());
+  }
+
+  /**
+   * Checks if two BucketInfo objects are equal.
+   * @param o Object BucketInfo
+   * @return True or False
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof BucketInfo)) {
+      return false;
+    }
+
+    BucketInfo that = (BucketInfo) o;
+    Preconditions.checkState(that.getVolumeName().equals(this.getVolumeName()));
+    return bucketName.equals(that.bucketName);
+
+  }
+
+  /**
+   * Hash Code for this object.
+   * @return int
+   */
+  @Override
+  public int hashCode() {
+    return bucketName.hashCode();
+  }
+
+}
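
A minimal parsing sketch for the response class above; the JSON property names follow the bean getters, and the sample payload is illustrative:

    import java.io.IOException;

    import org.apache.hadoop.ozone.client.rest.response.BucketInfo;

    public final class BucketInfoSketch {
      private BucketInfoSketch() { }

      public static void main(String[] args) throws IOException {
        // Sample payload as it might appear in a bucket-info REST response.
        String json = "{\"volumeName\":\"volume-one\","
            + "\"bucketName\":\"bucket-one\","
            + "\"createdOn\":\"Thu, 05 Apr 2018 01:02:03 GMT\"}";

        BucketInfo info = BucketInfo.parse(json);
        System.out.println(info.getVolumeName() + "/" + info.getBucketName()
            + " created " + info.getCreatedOn());
      }
    }
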
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/KeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/KeyInfo.java
new file mode 100644
index 0000000..2e1df5f
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/KeyInfo.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest.response;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+
+/**
+ * KeyInfo class is used for parsing the JSON response
+ * when a KeyInfo call is made.
+ */
+public class KeyInfo implements Comparable<KeyInfo> {
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(KeyInfo.class);
+
+  private long version;
+  private String md5hash;
+  private String createdOn;
+  private String modifiedOn;
+  private long size;
+  private String keyName;
+
+  /**
+   * When this key was created.
+   *
+   * @return Date String
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * When this key was modified.
+   *
+   * @return Date String
+   */
+  public String getModifiedOn() {
+    return modifiedOn;
+  }
+
+  /**
+   * When this key was created.
+   *
+   * @param createdOn Date String
+   */
+  public void setCreatedOn(String createdOn) {
+    this.createdOn = createdOn;
+  }
+
+  /**
+   * When this key was modified.
+   *
+   * @param modifiedOn Date String
+   */
+  public void setModifiedOn(String modifiedOn) {
+    this.modifiedOn = modifiedOn;
+  }
+
+  /**
+   * Gets the Key name of this object.
+   *
+   * @return String
+   */
+  public String getKeyName() {
+    return keyName;
+  }
+
+  /**
+   * Sets the Key name of this object.
+   *
+   * @param keyName String
+   */
+  public void setKeyName(String keyName) {
+    this.keyName = keyName;
+  }
+
+  /**
+   * Returns the MD5 Hash for the data of this key.
+   *
+   * @return String MD5
+   */
+  public String getMd5hash() {
+    return md5hash;
+  }
+
+  /**
+   * Sets the MD5 value of this key.
+   *
+   * @param md5hash Md5 of this file
+   */
+  public void setMd5hash(String md5hash) {
+    this.md5hash = md5hash;
+  }
+
+  /**
+   * Number of bytes stored in the data part of this key.
+   *
+   * @return long size of the data file
+   */
+  public long getSize() {
+    return size;
+  }
+
+  /**
+   * Sets the size of the data part of this key.
+   *
+   * @param size Size in long
+   */
+  public void setSize(long size) {
+    this.size = size;
+  }
+
+  /**
+   * Version of this key.
+   *
+   * @return returns the version of this key.
+   */
+  public long getVersion() {
+    return version;
+  }
+
+  /**
+   * Sets the version of this key.
+   *
+   * @param version - Version String
+   */
+  public void setVersion(long version) {
+    this.version = version;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   * @return a negative integer, zero, or a positive integer as this object
+   * is less than, equal to, or greater than the specified object.
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException   if the specified object's type prevents it
+   *                              from being compared to this object.
+   */
+  @Override
+  public int compareTo(KeyInfo o) {
+    if (this.keyName.compareTo(o.getKeyName()) != 0) {
+      return this.keyName.compareTo(o.getKeyName());
+    }
+
+    if (this.getVersion() == o.getVersion()) {
+      return 0;
+    }
+    if (this.getVersion() < o.getVersion()) {
+      return -1;
+    }
+    return 1;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    KeyInfo keyInfo = (KeyInfo) o;
+
+    return new EqualsBuilder()
+        .append(version, keyInfo.version)
+        .append(keyName, keyInfo.keyName)
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(17, 37)
+        .append(version)
+        .append(keyName)
+        .toHashCode();
+  }
+
+  /**
+   * Parse a string to return KeyInfo Object.
+   *
+   * @param jsonString Json String
+   * @return keyInfo
+   * @throws IOException
+   */
+  public static KeyInfo parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+}
\ No newline at end of file
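
A minimal usage sketch (not part of the patch above) of the Jackson-backed parse helper: the JSON property names mirror the getters/setters declared in KeyInfo, and the class name KeyInfoParseExample plus all sample values are hypothetical.

import org.apache.hadoop.ozone.client.rest.response.KeyInfo;

public class KeyInfoParseExample {
  public static void main(String[] args) throws Exception {
    // Property names follow the bean conventions of the getters/setters above.
    String json = "{\"version\":0,"
        + "\"md5hash\":\"d41d8cd98f00b204e9800998ecf8427e\","
        + "\"createdOn\":\"Thu, 14 Dec 2017 10:00:00 GMT\","
        + "\"modifiedOn\":\"Thu, 14 Dec 2017 10:00:00 GMT\","
        + "\"size\":1024,\"keyName\":\"key-1\"}";
    KeyInfo info = KeyInfo.parse(json);   // delegates to the shared ObjectReader
    System.out.println(info.getKeyName() + " -> " + info.getSize() + " bytes");
  }
}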
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeInfo.java
new file mode 100644
index 0000000..f98b56a
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeInfo.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest.response;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+
+/**
+ * VolumeInfo class is used for parsing the JSON response
+ * when a VolumeInfo call is made.
+ */
+@InterfaceAudience.Private
+public class VolumeInfo implements Comparable<VolumeInfo> {
+
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(VolumeInfo.class);
+
+  private VolumeOwner owner;
+  private OzoneQuota quota;
+  private String volumeName;
+  private String createdOn;
+  private String createdBy;
+
+
+  /**
+   * Constructor for VolumeInfo.
+   *
+   * @param volumeName - Name of the Volume
+   * @param createdOn - Date String
+   * @param createdBy - Person who created it
+   */
+  public VolumeInfo(String volumeName, String createdOn,
+                    String createdBy) {
+    this.volumeName = volumeName;
+    this.createdOn = createdOn;
+    this.createdBy = createdBy;
+  }
+
+  /**
+   * Constructor for VolumeInfo.
+   */
+  public VolumeInfo() {
+  }
+
+  /**
+   * Gets the volume name.
+   *
+   * @return Volume Name
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Sets the volume name.
+   *
+   * @param volumeName Volume Name
+   */
+  public void setVolumeName(String volumeName) {
+    this.volumeName = volumeName;
+  }
+
+
+  /**
+   * Returns the name of the person who created this volume.
+   *
+   * @return Name of Admin who created this
+   */
+  public String getCreatedBy() {
+    return createdBy;
+  }
+
+  /**
+   * Sets the user name of the person who created this volume.
+   *
+   * @param createdBy UserName
+   */
+  public void setCreatedBy(String createdBy) {
+    this.createdBy = createdBy;
+  }
+
+  /**
+   * Gets the date on which this volume was created.
+   *
+   * @return Date String
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * Sets the date string.
+   *
+   * @param createdOn Date String
+   */
+  public void setCreatedOn(String createdOn) {
+    this.createdOn = createdOn;
+  }
+
+  /**
+   * Returns the owner info.
+   *
+   * @return OwnerInfo
+   */
+  public VolumeOwner getOwner() {
+    return owner;
+  }
+
+  /**
+   * Sets the owner.
+   *
+   * @param owner OwnerInfo
+   */
+  public void setOwner(VolumeOwner owner) {
+    this.owner = owner;
+  }
+
+  /**
+   * Returns the quota information on a volume.
+   *
+   * @return Quota
+   */
+  public OzoneQuota getQuota() {
+    return quota;
+  }
+
+  /**
+   * Sets the quota info.
+   *
+   * @param quota Quota Info
+   */
+  public void setQuota(OzoneQuota quota) {
+    this.quota = quota;
+  }
+
+  /**
+   * Compares this VolumeInfo with another, ordering by volume name.
+   * @param o VolumeInfo Object.
+   * @return Result of comparison
+   */
+  @Override
+  public int compareTo(VolumeInfo o) {
+    return this.volumeName.compareTo(o.getVolumeName());
+  }
+
+  /**
+   * Returns VolumeInfo class from json string.
+   *
+   * @param data Json String
+   *
+   * @return VolumeInfo
+   *
+   * @throws IOException
+   */
+  public static VolumeInfo parse(String data) throws IOException {
+    return READER.readValue(data);
+  }
+
+  /**
+   * Indicates whether some other object is "equal to" this one.
+   *
+   * @param obj the reference object with which to compare.
+   *
+   * @return {@code true} if this object is the same as the obj
+   * argument; {@code false} otherwise.
+   */
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    VolumeInfo otherInfo = (VolumeInfo) obj;
+    return otherInfo.getVolumeName().equals(this.getVolumeName());
+  }
+
+  /**
+   * Returns a hash code value for the object. This method is
+   * supported for the benefit of hash tables such as those provided by
+   * HashMap.
+   * @return a hash code value for this object.
+   *
+   * @see Object#equals(Object)
+   * @see System#identityHashCode
+   */
+  @Override
+  public int hashCode() {
+    return getVolumeName().hashCode();
+  }
+
+}
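
For illustration only, a hedged sketch of deserializing a VolumeInfo from JSON. Only properties with setters (volumeName, createdOn, createdBy) are included; quota and owner are omitted to keep the sketch minimal, and the class name and sample values are hypothetical.

import org.apache.hadoop.ozone.client.rest.response.VolumeInfo;

public class VolumeInfoParseExample {
  public static void main(String[] args) throws Exception {
    String json = "{\"volumeName\":\"volume-1\","
        + "\"createdOn\":\"Thu, 14 Dec 2017 10:00:00 GMT\","
        + "\"createdBy\":\"hdfs\"}";
    VolumeInfo info = VolumeInfo.parse(json);   // uses the static ObjectReader
    System.out.println(info.getVolumeName() + " created by " + info.getCreatedBy());
  }
}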
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeOwner.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeOwner.java
new file mode 100644
index 0000000..d4dbad4
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/VolumeOwner.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest.response;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+/**
+ * Volume Owner represents the owner of a volume.
+ *
+ * This is a class instead of a string since we might need to extend this class
+ * to support other forms of authentication.
+ */
+@InterfaceAudience.Private
+public class VolumeOwner {
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  private String name;
+
+  /**
+   * Constructor for VolumeOwner.
+   *
+   * @param name name of the User
+   */
+  public VolumeOwner(String name) {
+    this.name = name;
+  }
+
+  /**
+   * Constructs Volume Owner.
+   */
+  public VolumeOwner() {
+    name = null;
+  }
+
+  /**
+   * Returns the user name.
+   *
+   * @return Name
+   */
+  public String getName() {
+    return name;
+  }
+
+}
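
A small sketch (not part of the patch) showing the effect of @JsonInclude(NON_NULL) on the name field: an owner built with the no-arg constructor serializes to an empty JSON object. The driver class and values are hypothetical.

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.hadoop.ozone.client.rest.response.VolumeOwner;

public class VolumeOwnerJsonExample {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // A populated owner serializes with its name.
    System.out.println(mapper.writeValueAsString(new VolumeOwner("bilbo"))); // {"name":"bilbo"}
    // A null name is dropped by the NON_NULL include rule.
    System.out.println(mapper.writeValueAsString(new VolumeOwner()));        // {}
  }
}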
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/package-info.java
new file mode 100644
index 0000000..432b029b
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/client/rest/response/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest.response;
+
+/**
+ * This package contains classes for the Ozone REST client library.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/OzoneGetConf.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/OzoneGetConf.java
new file mode 100644
index 0000000..d5f9093
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/OzoneGetConf.java
@@ -0,0 +1,269 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.freon;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.KsmUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * CLI utility to print out ozone related configuration.
+ */
+public class OzoneGetConf extends Configured implements Tool {
+
+  private static final String DESCRIPTION = "ozone getconf is a utility for "
+      + "getting configuration information from the config file.\n";
+
+  enum Command {
+    INCLUDE_FILE("-includeFile",
+        "gets the include file path that defines the datanodes " +
+            "that can join the cluster."),
+    EXCLUDE_FILE("-excludeFile",
+        "gets the exclude file path that defines the datanodes " +
+            "that need to decommissioned."),
+    KEYSPACEMANAGER("-keyspacemanagers",
+        "gets list of ozone key space manager nodes in the cluster"),
+    STORAGECONTAINERMANAGER("-storagecontainermanagers",
+        "gets list of ozone storage container manager nodes in the cluster"),
+    CONFKEY("-confKey [key]", "gets a specific key from the configuration");
+
+    private static final Map<String, OzoneGetConf.CommandHandler> HANDLERS;
+
+    static {
+      HANDLERS = new HashMap<String, OzoneGetConf.CommandHandler>();
+      HANDLERS.put(StringUtils.toLowerCase(KEYSPACEMANAGER.getName()),
+          new KeySpaceManagersCommandHandler());
+      HANDLERS.put(StringUtils.toLowerCase(STORAGECONTAINERMANAGER.getName()),
+          new StorageContainerManagersCommandHandler());
+      HANDLERS.put(StringUtils.toLowerCase(CONFKEY.getName()),
+          new PrintConfKeyCommandHandler());
+    }
+
+    private final String cmd;
+    private final String description;
+
+    Command(String cmd, String description) {
+      this.cmd = cmd;
+      this.description = description;
+    }
+
+    public String getName() {
+      return cmd.split(" ")[0];
+    }
+
+    public String getUsage() {
+      return cmd;
+    }
+
+    public String getDescription() {
+      return description;
+    }
+
+    public static OzoneGetConf.CommandHandler getHandler(String cmd) {
+      return HANDLERS.get(StringUtils.toLowerCase(cmd));
+    }
+  }
+
+  static final String USAGE;
+  static {
+    HdfsConfiguration.init();
+
+    /* Initialize USAGE based on Command values */
+    StringBuilder usage = new StringBuilder(DESCRIPTION);
+    usage.append("\nozone getconf \n");
+    for (OzoneGetConf.Command cmd : OzoneGetConf.Command.values()) {
+      usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
+          + "\n");
+    }
+    USAGE = usage.toString();
+  }
+
+  /**
+   * Handler to return value for key corresponding to the
+   * {@link OzoneGetConf.Command}.
+   */
+  static class CommandHandler {
+    protected String key; // Configuration key to lookup
+
+    CommandHandler() {
+      this(null);
+    }
+
+    CommandHandler(String key) {
+      this.key = key;
+    }
+
+    final int doWork(OzoneGetConf tool, String[] args) {
+      try {
+        checkArgs(args);
+
+        return doWorkInternal(tool, args);
+      } catch (Exception e) {
+        tool.printError(e.getMessage());
+      }
+      return -1;
+    }
+
+    protected void checkArgs(String[] args) {
+      if (args.length > 0) {
+        throw new HadoopIllegalArgumentException(
+            "Did not expect argument: " + args[0]);
+      }
+    }
+
+
+    /** Method to be overridden by sub classes for specific behavior. */
+    int doWorkInternal(OzoneGetConf tool, String[] args) throws Exception {
+
+      String value = tool.getConf().getTrimmed(key);
+      if (value != null) {
+        tool.printOut(value);
+        return 0;
+      }
+      tool.printError("Configuration " + key + " is missing.");
+      return -1;
+    }
+  }
+
+  static class PrintConfKeyCommandHandler extends OzoneGetConf.CommandHandler {
+    @Override
+    protected void checkArgs(String[] args) {
+      if (args.length != 1) {
+        throw new HadoopIllegalArgumentException(
+            "usage: " + OzoneGetConf.Command.CONFKEY.getUsage());
+      }
+    }
+
+    @Override
+    int doWorkInternal(OzoneGetConf tool, String[] args) throws Exception {
+      this.key = args[0];
+      return super.doWorkInternal(tool, args);
+    }
+  }
+
+  private final PrintStream out; // Stream for printing command output
+  private final PrintStream err; // Stream for printing error
+
+  protected OzoneGetConf(Configuration conf) {
+    this(conf, System.out, System.err);
+  }
+
+  protected OzoneGetConf(Configuration conf, PrintStream out, PrintStream err) {
+    super(conf);
+    this.out = out;
+    this.err = err;
+  }
+
+  void printError(String message) {
+    err.println(message);
+  }
+
+  void printOut(String message) {
+    out.println(message);
+  }
+
+  private void printUsage() {
+    printError(USAGE);
+  }
+
+  /**
+   * Main method that runs the tool for given arguments.
+   * @param args arguments
+   * @return return status of the command
+   */
+  private int doWork(String[] args) {
+    if (args.length >= 1) {
+      OzoneGetConf.CommandHandler handler =
+          OzoneGetConf.Command.getHandler(args[0]);
+      if (handler != null) {
+        return handler.doWork(this, Arrays.copyOfRange(args, 1, args.length));
+      }
+    }
+    printUsage();
+    return -1;
+  }
+
+  @Override
+  public int run(final String[] args) throws Exception {
+    return SecurityUtil.doAsCurrentUser(
+          new PrivilegedExceptionAction<Integer>() {
+            @Override
+            public Integer run() throws Exception {
+              return doWork(args);
+            }
+          });
+  }
+
+  /**
+   * Handler for {@link Command#STORAGECONTAINERMANAGER}.
+   */
+  static class StorageContainerManagersCommandHandler extends CommandHandler {
+
+    @Override
+    public int doWorkInternal(OzoneGetConf tool, String[] args)
+        throws IOException {
+      Collection<InetSocketAddress> addresses = HddsUtils
+          .getSCMAddresses(tool.getConf());
+
+      for (InetSocketAddress addr : addresses) {
+        tool.printOut(addr.getHostName());
+      }
+      return 0;
+    }
+  }
+
+  /**
+   * Handler for {@link Command#KEYSPACEMANAGER}.
+   */
+  static class KeySpaceManagersCommandHandler extends CommandHandler {
+    @Override
+    public int doWorkInternal(OzoneGetConf tool, String[] args)
+        throws IOException {
+      tool.printOut(KsmUtils.getKsmAddress(tool.getConf()).getHostName());
+      return 0;
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
+      System.exit(0);
+    }
+
+    Configuration conf = new Configuration();
+    conf.addResource(new OzoneConfiguration());
+    int res = ToolRunner.run(new OzoneGetConf(conf), args);
+    System.exit(res);
+  }
+}
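
A hedged sketch of driving the tool programmatically. The driver class is hypothetical and sits in the same org.apache.hadoop.ozone.freon package because the OzoneGetConf constructors are protected; it mirrors what main() does for a single -confKey query.

package org.apache.hadoop.ozone.freon;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.util.ToolRunner;

public class OzoneGetConfExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.addResource(new OzoneConfiguration());
    // Equivalent to running "ozone getconf -confKey ozone.ksm.address" from
    // the shell: prints the configured value, or an error if the key is unset.
    int rc = ToolRunner.run(new OzoneGetConf(conf),
        new String[] {"-confKey", "ozone.ksm.address"});
    System.exit(rc);
  }
}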
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/package-info.java
new file mode 100644
index 0000000..150c64e
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/freon/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.freon;
+/**
+ * Classes related to Ozone tools.
+ */
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/KSMConfigKeys.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/KSMConfigKeys.java
new file mode 100644
index 0000000..75cf613
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/KSMConfigKeys.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.ozone.OzoneAcl;
+/**
+ * Configuration keys and default values for KSM (Key Space Manager).
+ */
+public final class KSMConfigKeys {
+  /**
+   * Never constructed.
+   */
+  private KSMConfigKeys() {
+  }
+
+
+  public static final String OZONE_KSM_HANDLER_COUNT_KEY =
+      "ozone.ksm.handler.count.key";
+  public static final int OZONE_KSM_HANDLER_COUNT_DEFAULT = 20;
+
+  public static final String OZONE_KSM_ADDRESS_KEY =
+      "ozone.ksm.address";
+  public static final String OZONE_KSM_BIND_HOST_DEFAULT =
+      "0.0.0.0";
+  public static final int OZONE_KSM_PORT_DEFAULT = 9862;
+
+  public static final String OZONE_KSM_HTTP_ENABLED_KEY =
+      "ozone.ksm.http.enabled";
+  public static final String OZONE_KSM_HTTP_BIND_HOST_KEY =
+      "ozone.ksm.http-bind-host";
+  public static final String OZONE_KSM_HTTPS_BIND_HOST_KEY =
+      "ozone.ksm.https-bind-host";
+  public static final String OZONE_KSM_HTTP_ADDRESS_KEY =
+      "ozone.ksm.http-address";
+  public static final String OZONE_KSM_HTTPS_ADDRESS_KEY =
+      "ozone.ksm.https-address";
+  public static final String OZONE_KSM_KEYTAB_FILE =
+      "ozone.ksm.keytab.file";
+  public static final String OZONE_KSM_HTTP_BIND_HOST_DEFAULT = "0.0.0.0";
+  public static final int OZONE_KSM_HTTP_BIND_PORT_DEFAULT = 9874;
+  public static final int OZONE_KSM_HTTPS_BIND_PORT_DEFAULT = 9875;
+
+  // LevelDB uses an off-heap cache; the default size is 128 MB.
+  public static final String OZONE_KSM_DB_CACHE_SIZE_MB =
+      "ozone.ksm.db.cache.size.mb";
+  public static final int OZONE_KSM_DB_CACHE_SIZE_DEFAULT = 128;
+
+  public static final String OZONE_KSM_USER_MAX_VOLUME =
+      "ozone.ksm.user.max.volume";
+  public static final int OZONE_KSM_USER_MAX_VOLUME_DEFAULT = 1024;
+
+  // KSM Default user/group permissions
+  public static final String OZONE_KSM_USER_RIGHTS =
+      "ozone.ksm.user.rights";
+  public static final OzoneAcl.OzoneACLRights OZONE_KSM_USER_RIGHTS_DEFAULT =
+      OzoneAcl.OzoneACLRights.READ_WRITE;
+
+  public static final String OZONE_KSM_GROUP_RIGHTS =
+      "ozone.ksm.group.rights";
+  public static final OzoneAcl.OzoneACLRights OZONE_KSM_GROUP_RIGHTS_DEFAULT =
+      OzoneAcl.OzoneACLRights.READ_WRITE;
+
+  public static final String OZONE_KEY_DELETING_LIMIT_PER_TASK =
+      "ozone.key.deleting.limit.per.task";
+  public static final int OZONE_KEY_DELETING_LIMIT_PER_TASK_DEFAULT = 1000;
+}
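
A minimal sketch (not part of the patch) of reading these keys with their paired defaults from an OzoneConfiguration. The class name is hypothetical; actual KSM address resolution goes through KsmUtils rather than this direct lookup.

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.ksm.KSMConfigKeys;

public class KsmConfigExample {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    // Each *_KEY constant is paired with a *_DEFAULT that is used when
    // ozone-site.xml does not override it.
    int handlers = conf.getInt(KSMConfigKeys.OZONE_KSM_HANDLER_COUNT_KEY,
        KSMConfigKeys.OZONE_KSM_HANDLER_COUNT_DEFAULT);
    int cacheMb = conf.getInt(KSMConfigKeys.OZONE_KSM_DB_CACHE_SIZE_MB,
        KSMConfigKeys.OZONE_KSM_DB_CACHE_SIZE_DEFAULT);
    System.out.println("KSM handlers=" + handlers + ", db cache=" + cacheMb + " MB");
  }
}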
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketArgs.java
new file mode 100644
index 0000000..1211b50
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketArgs.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.BucketArgs;
+import org.apache.hadoop.ozone.protocolPB.KSMPBHelper;
+
+/**
+ * A class that encapsulates Bucket Arguments.
+ */
+public final class KsmBucketArgs {
+  /**
+   * Name of the volume to which the bucket belongs.
+   */
+  private final String volumeName;
+  /**
+   * Name of the bucket.
+   */
+  private final String bucketName;
+  /**
+   * ACL's that are to be added for the bucket.
+   */
+  private List<OzoneAcl> addAcls;
+  /**
+   * ACL's that are to be removed from the bucket.
+   */
+  private List<OzoneAcl> removeAcls;
+  /**
+   * Bucket Version flag.
+   */
+  private Boolean isVersionEnabled;
+  /**
+   * Type of storage to be used for this bucket.
+   * [RAM_DISK, SSD, DISK, ARCHIVE]
+   */
+  private StorageType storageType;
+
+  /**
+   * Private constructor, constructed via builder.
+   * @param volumeName - Volume name.
+   * @param bucketName - Bucket name.
+   * @param addAcls - ACL's to be added.
+   * @param removeAcls - ACL's to be removed.
+   * @param isVersionEnabled - Bucket version flag.
+   * @param storageType - Storage type to be used.
+   */
+  private KsmBucketArgs(String volumeName, String bucketName,
+      List<OzoneAcl> addAcls, List<OzoneAcl> removeAcls,
+      Boolean isVersionEnabled, StorageType storageType) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+    this.addAcls = addAcls;
+    this.removeAcls = removeAcls;
+    this.isVersionEnabled = isVersionEnabled;
+    this.storageType = storageType;
+  }
+
+  /**
+   * Returns the Volume Name.
+   * @return String.
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Returns the Bucket Name.
+   * @return String
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Returns the ACLs that are to be added.
+   * @return list of OzoneAcl
+   */
+  public List<OzoneAcl> getAddAcls() {
+    return addAcls;
+  }
+
+  /**
+   * Returns the ACLs that are to be removed.
+   * @return list of OzoneAcl
+   */
+  public List<OzoneAcl> getRemoveAcls() {
+    return removeAcls;
+  }
+
+  /**
+   * Returns true if bucket version is enabled, else false.
+   * @return isVersionEnabled
+   */
+  public Boolean getIsVersionEnabled() {
+    return isVersionEnabled;
+  }
+
+  /**
+   * Returns the type of storage to be used.
+   * @return StorageType
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Returns new builder class that builds a KsmBucketArgs.
+   *
+   * @return Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder for KsmBucketArgs.
+   */
+  public static class Builder {
+    private String volumeName;
+    private String bucketName;
+    private List<OzoneAcl> addAcls;
+    private List<OzoneAcl> removeAcls;
+    private Boolean isVersionEnabled;
+    private StorageType storageType;
+
+    public Builder setVolumeName(String volume) {
+      this.volumeName = volume;
+      return this;
+    }
+
+    public Builder setBucketName(String bucket) {
+      this.bucketName = bucket;
+      return this;
+    }
+
+    public Builder setAddAcls(List<OzoneAcl> acls) {
+      this.addAcls = acls;
+      return this;
+    }
+
+    public Builder setRemoveAcls(List<OzoneAcl> acls) {
+      this.removeAcls = acls;
+      return this;
+    }
+
+    public Builder setIsVersionEnabled(Boolean versionFlag) {
+      this.isVersionEnabled = versionFlag;
+      return this;
+    }
+
+    public Builder setStorageType(StorageType storage) {
+      this.storageType = storage;
+      return this;
+    }
+
+    /**
+     * Constructs the KsmBucketArgs.
+     * @return instance of KsmBucketArgs.
+     */
+    public KsmBucketArgs build() {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      return new KsmBucketArgs(volumeName, bucketName, addAcls,
+          removeAcls, isVersionEnabled, storageType);
+    }
+  }
+
+  /**
+   * Creates BucketArgs protobuf from KsmBucketArgs.
+   */
+  public BucketArgs getProtobuf() {
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setVolumeName(volumeName)
+        .setBucketName(bucketName);
+    if(addAcls != null && !addAcls.isEmpty()) {
+      builder.addAllAddAcls(addAcls.stream().map(
+          KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()));
+    }
+    if(removeAcls != null && !removeAcls.isEmpty()) {
+      builder.addAllRemoveAcls(removeAcls.stream().map(
+          KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()));
+    }
+    if(isVersionEnabled != null) {
+      builder.setIsVersionEnabled(isVersionEnabled);
+    }
+    if(storageType != null) {
+      builder.setStorageType(
+          PBHelperClient.convertStorageType(storageType));
+    }
+    return builder.build();
+  }
+
+  /**
+   * Parses BucketInfo protobuf and creates KsmBucketArgs.
+   * @param bucketArgs
+   * @return instance of KsmBucketArgs
+   */
+  public static KsmBucketArgs getFromProtobuf(BucketArgs bucketArgs) {
+    return new KsmBucketArgs(bucketArgs.getVolumeName(),
+        bucketArgs.getBucketName(),
+        bucketArgs.getAddAclsList().stream().map(
+            KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()),
+        bucketArgs.getRemoveAclsList().stream().map(
+            KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()),
+        bucketArgs.hasIsVersionEnabled() ?
+            bucketArgs.getIsVersionEnabled() : null,
+        bucketArgs.hasStorageType() ? PBHelperClient.convertStorageType(
+            bucketArgs.getStorageType()) : null);
+  }
+}
\ No newline at end of file
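
A sketch (not part of the patch) of building KsmBucketArgs and round-tripping it through the BucketArgs protobuf. Only the mandatory volume and bucket names must be set; optional fields left null are simply omitted from the message. Class name and sample values are hypothetical.

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketArgs;

public class KsmBucketArgsExample {
  public static void main(String[] args) {
    KsmBucketArgs bucketArgs = KsmBucketArgs.newBuilder()
        .setVolumeName("volume-1")
        .setBucketName("bucket-1")
        .setStorageType(StorageType.SSD)
        .setIsVersionEnabled(true)
        .build();
    BucketArgs proto = bucketArgs.getProtobuf();
    KsmBucketArgs copy = KsmBucketArgs.getFromProtobuf(proto);
    System.out.println(copy.getVolumeName() + "/" + copy.getBucketName()
        + " storage=" + copy.getStorageType());
  }
}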
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketInfo.java
new file mode 100644
index 0000000..a49137a
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmBucketInfo.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.BucketInfo;
+import org.apache.hadoop.ozone.protocolPB.KSMPBHelper;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * A class that encapsulates Bucket Info.
+ */
+public final class KsmBucketInfo {
+  /**
+   * Name of the volume to which the bucket belongs.
+   */
+  private final String volumeName;
+  /**
+   * Name of the bucket.
+   */
+  private final String bucketName;
+  /**
+   * ACL Information.
+   */
+  private List<OzoneAcl> acls;
+  /**
+   * Bucket Version flag.
+   */
+  private Boolean isVersionEnabled;
+  /**
+   * Type of storage to be used for this bucket.
+   * [RAM_DISK, SSD, DISK, ARCHIVE]
+   */
+  private StorageType storageType;
+  /**
+   * Creation time of bucket.
+   */
+  private final long creationTime;
+
+  /**
+   * Private constructor, constructed via builder.
+   * @param volumeName - Volume name.
+   * @param bucketName - Bucket name.
+   * @param acls - list of ACLs.
+   * @param isVersionEnabled - Bucket version flag.
+   * @param storageType - Storage type to be used.
+   * @param creationTime - Bucket creation time.
+   */
+  private KsmBucketInfo(String volumeName, String bucketName,
+                        List<OzoneAcl> acls, boolean isVersionEnabled,
+                        StorageType storageType, long creationTime) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+    this.acls = acls;
+    this.isVersionEnabled = isVersionEnabled;
+    this.storageType = storageType;
+    this.creationTime = creationTime;
+  }
+
+  /**
+   * Returns the Volume Name.
+   * @return String.
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Returns the Bucket Name.
+   * @return String
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Returns the ACLs associated with this bucket.
+   * @return list of OzoneAcl
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Returns true if bucket version is enabled, else false.
+   * @return isVersionEnabled
+   */
+  public boolean getIsVersionEnabled() {
+    return isVersionEnabled;
+  }
+
+  /**
+   * Returns the type of storage to be used.
+   * @return StorageType
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Returns creation time.
+   *
+   * @return long
+   */
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns new builder class that builds a KsmBucketInfo.
+   *
+   * @return Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder for KsmBucketInfo.
+   */
+  public static class Builder {
+    private String volumeName;
+    private String bucketName;
+    private List<OzoneAcl> acls;
+    private Boolean isVersionEnabled;
+    private StorageType storageType;
+    private long creationTime;
+
+    Builder() {
+      //Default values
+      this.acls = new LinkedList<>();
+      this.isVersionEnabled = false;
+      this.storageType = StorageType.DISK;
+    }
+
+    public Builder setVolumeName(String volume) {
+      this.volumeName = volume;
+      return this;
+    }
+
+    public Builder setBucketName(String bucket) {
+      this.bucketName = bucket;
+      return this;
+    }
+
+    public Builder setAcls(List<OzoneAcl> listOfAcls) {
+      this.acls = listOfAcls;
+      return this;
+    }
+
+    public Builder setIsVersionEnabled(Boolean versionFlag) {
+      this.isVersionEnabled = versionFlag;
+      return this;
+    }
+
+    public Builder setStorageType(StorageType storage) {
+      this.storageType = storage;
+      return this;
+    }
+
+    public Builder setCreationTime(long createdOn) {
+      this.creationTime = createdOn;
+      return this;
+    }
+
+    /**
+     * Constructs the KsmBucketInfo.
+     * @return instance of KsmBucketInfo.
+     */
+    public KsmBucketInfo build() {
+      Preconditions.checkNotNull(volumeName);
+      Preconditions.checkNotNull(bucketName);
+      Preconditions.checkNotNull(acls);
+      Preconditions.checkNotNull(isVersionEnabled);
+      Preconditions.checkNotNull(storageType);
+
+      return new KsmBucketInfo(volumeName, bucketName, acls,
+          isVersionEnabled, storageType, creationTime);
+    }
+  }
+
+  /**
+   * Creates BucketInfo protobuf from KsmBucketInfo.
+   */
+  public BucketInfo getProtobuf() {
+    return BucketInfo.newBuilder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .addAllAcls(acls.stream().map(
+            KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()))
+        .setIsVersionEnabled(isVersionEnabled)
+        .setStorageType(PBHelperClient.convertStorageType(
+            storageType))
+        .setCreationTime(creationTime)
+        .build();
+  }
+
+  /**
+   * Parses BucketInfo protobuf and creates KsmBucketInfo.
+   * @param bucketInfo
+   * @return instance of KsmBucketInfo
+   */
+  public static KsmBucketInfo getFromProtobuf(BucketInfo bucketInfo) {
+    return new KsmBucketInfo(
+        bucketInfo.getVolumeName(),
+        bucketInfo.getBucketName(),
+        bucketInfo.getAclsList().stream().map(
+            KSMPBHelper::convertOzoneAcl).collect(Collectors.toList()),
+        bucketInfo.getIsVersionEnabled(),
+        PBHelperClient.convertStorageType(
+            bucketInfo.getStorageType()), bucketInfo.getCreationTime());
+  }
+}
\ No newline at end of file
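
A hedged sketch (not part of the patch): the KsmBucketInfo builder pre-populates acls, isVersionEnabled and storageType, so a caller only has to supply the names and creation time. Class name and values are hypothetical.

import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketInfo;
import org.apache.hadoop.util.Time;

public class KsmBucketInfoExample {
  public static void main(String[] args) {
    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
        .setVolumeName("volume-1")
        .setBucketName("bucket-1")
        .setCreationTime(Time.now())
        .build();
    BucketInfo proto = bucketInfo.getProtobuf();
    // Prints DISK, the builder's default storage type.
    System.out.println(KsmBucketInfo.getFromProtobuf(proto).getStorageType());
  }
}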
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyArgs.java
new file mode 100644
index 0000000..cd17e28
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyArgs.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+
+/**
+ * Args for a key. Clients use this to specify a key's attributes on key creation
+ * (putKey()).
+ */
+public final class KsmKeyArgs {
+  private final String volumeName;
+  private final String bucketName;
+  private final String keyName;
+  private long dataSize;
+  private final ReplicationType type;
+  private final ReplicationFactor factor;
+
+  private KsmKeyArgs(String volumeName, String bucketName, String keyName,
+      long dataSize, ReplicationType type, ReplicationFactor factor) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+    this.keyName = keyName;
+    this.dataSize = dataSize;
+    this.type = type;
+    this.factor = factor;
+  }
+
+  public ReplicationType getType() {
+    return type;
+  }
+
+  public ReplicationFactor getFactor() {
+    return factor;
+  }
+
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  public String getKeyName() {
+    return keyName;
+  }
+
+  public long getDataSize() {
+    return dataSize;
+  }
+
+  public void setDataSize(long size) {
+    dataSize = size;
+  }
+
+  /**
+   * Builder class of KsmKeyArgs.
+   */
+  public static class Builder {
+    private String volumeName;
+    private String bucketName;
+    private String keyName;
+    private long dataSize;
+    private ReplicationType type;
+    private ReplicationFactor factor;
+
+
+    public Builder setVolumeName(String volume) {
+      this.volumeName = volume;
+      return this;
+    }
+
+    public Builder setBucketName(String bucket) {
+      this.bucketName = bucket;
+      return this;
+    }
+
+    public Builder setKeyName(String key) {
+      this.keyName = key;
+      return this;
+    }
+
+    public Builder setDataSize(long size) {
+      this.dataSize = size;
+      return this;
+    }
+
+    public Builder setType(ReplicationType replicationType) {
+      this.type = replicationType;
+      return this;
+    }
+
+    public Builder setFactor(ReplicationFactor replicationFactor) {
+      this.factor = replicationFactor;
+      return this;
+    }
+
+    public KsmKeyArgs build() {
+      return new KsmKeyArgs(volumeName, bucketName, keyName, dataSize,
+          type, factor);
+    }
+  }
+}
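
For illustration, a sketch of assembling KsmKeyArgs for a putKey call. The replication enum values used here (RATIS, THREE) are assumed to exist in HddsProtos; the class name and sample values are hypothetical.

import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;

public class KsmKeyArgsExample {
  public static void main(String[] args) {
    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
        .setVolumeName("volume-1")
        .setBucketName("bucket-1")
        .setKeyName("key-1")
        .setDataSize(1024)
        .setType(ReplicationType.RATIS)      // assumed enum value
        .setFactor(ReplicationFactor.THREE)  // assumed enum value
        .build();
    System.out.println(keyArgs.getKeyName() + " -> " + keyArgs.getDataSize() + " bytes");
  }
}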
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyInfo.java
new file mode 100644
index 0000000..41d523c
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyInfo.java
@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyInfo;
+import org.apache.hadoop.util.Time;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Args for a key block: the block instances for the key requested in putKey.
+ * This is returned from KSM to the client, and the client uses this class to
+ * talk to the datanode. It is also the metadata written to ksm.db on the
+ * server side.
+ */
+public final class KsmKeyInfo {
+  private final String volumeName;
+  private final String bucketName;
+  // name of key client specified
+  private final String keyName;
+  private long dataSize;
+  private List<KsmKeyLocationInfoGroup> keyLocationVersions;
+  private final long creationTime;
+  private long modificationTime;
+
+  private KsmKeyInfo(String volumeName, String bucketName, String keyName,
+      List<KsmKeyLocationInfoGroup> versions, long dataSize,
+      long creationTime, long modificationTime) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+    this.keyName = keyName;
+    this.dataSize = dataSize;
+    // it is important that the versions are ordered from old to new.
+    // Do this sanity check when versions are loaded while creating KsmKeyInfo.
+    // TODO : this check is not strictly necessary; it is here only because
+    // versioning is still a work in progress. Remove it once versioning is
+    // complete and proven to function correctly.
+    long currentVersion = -1;
+    for (KsmKeyLocationInfoGroup version : versions) {
+      Preconditions.checkArgument(
+            currentVersion + 1 == version.getVersion());
+      currentVersion = version.getVersion();
+    }
+    this.keyLocationVersions = versions;
+    this.creationTime = creationTime;
+    this.modificationTime = modificationTime;
+  }
+
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  public String getKeyName() {
+    return keyName;
+  }
+
+  public long getDataSize() {
+    return dataSize;
+  }
+
+  public void setDataSize(long size) {
+    this.dataSize = size;
+  }
+
+  public synchronized KsmKeyLocationInfoGroup getLatestVersionLocations()
+      throws IOException {
+    return keyLocationVersions.size() == 0 ? null :
+        keyLocationVersions.get(keyLocationVersions.size() - 1);
+  }
+
+  public List<KsmKeyLocationInfoGroup> getKeyLocationVersions() {
+    return keyLocationVersions;
+  }
+
+  public void updateModifcationTime() {
+    this.modificationTime = Time.monotonicNow();
+  }
+
+  /**
+   * Append a set of blocks to the latest version. Note that these blocks are
+   * part of the latest version, not a new version.
+   *
+   * @param newLocationList the list of new blocks to be added.
+   * @throws IOException
+   */
+  public synchronized void appendNewBlocks(
+      List<KsmKeyLocationInfo> newLocationList) throws IOException {
+    if (keyLocationVersions.size() == 0) {
+      throw new IOException("Appending new block, but no version exist");
+    }
+    KsmKeyLocationInfoGroup currentLatestVersion =
+        keyLocationVersions.get(keyLocationVersions.size() - 1);
+    currentLatestVersion.appendNewBlocks(newLocationList);
+    setModificationTime(Time.now());
+  }
+
+  /**
+   * Add a new set of blocks. The new blocks will be added as appending a new
+   * version to the all version list.
+   *
+   * @param newLocationList the list of new blocks to be added.
+   * @throws IOException
+   */
+  public synchronized long addNewVersion(
+      List<KsmKeyLocationInfo> newLocationList) throws IOException {
+    long latestVersionNum;
+    if (keyLocationVersions.size() == 0) {
+      // no version exist, these blocks are the very first version.
+      keyLocationVersions.add(new KsmKeyLocationInfoGroup(0, newLocationList));
+      latestVersionNum = 0;
+    } else {
+      // it is important that the new version is always at the tail of the list
+      KsmKeyLocationInfoGroup currentLatestVersion =
+          keyLocationVersions.get(keyLocationVersions.size() - 1);
+      // the new version is created based on the current latest version
+      KsmKeyLocationInfoGroup newVersion =
+          currentLatestVersion.generateNextVersion(newLocationList);
+      keyLocationVersions.add(newVersion);
+      latestVersionNum = newVersion.getVersion();
+    }
+    setModificationTime(Time.now());
+    return latestVersionNum;
+  }
+
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  public long getModificationTime() {
+    return modificationTime;
+  }
+
+  public void setModificationTime(long modificationTime) {
+    this.modificationTime = modificationTime;
+  }
+
+  /**
+   * Builder of KsmKeyInfo.
+   */
+  public static class Builder {
+    private String volumeName;
+    private String bucketName;
+    private String keyName;
+    private long dataSize;
+    private List<KsmKeyLocationInfoGroup> ksmKeyLocationInfoGroups;
+    private long creationTime;
+    private long modificationTime;
+
+    public Builder setVolumeName(String volume) {
+      this.volumeName = volume;
+      return this;
+    }
+
+    public Builder setBucketName(String bucket) {
+      this.bucketName = bucket;
+      return this;
+    }
+
+    public Builder setKeyName(String key) {
+      this.keyName = key;
+      return this;
+    }
+
+    public Builder setKsmKeyLocationInfos(
+        List<KsmKeyLocationInfoGroup> ksmKeyLocationInfoList) {
+      this.ksmKeyLocationInfoGroups = ksmKeyLocationInfoList;
+      return this;
+    }
+
+    public Builder setDataSize(long size) {
+      this.dataSize = size;
+      return this;
+    }
+
+    public Builder setCreationTime(long crTime) {
+      this.creationTime = crTime;
+      return this;
+    }
+
+    public Builder setModificationTime(long mTime) {
+      this.modificationTime = mTime;
+      return this;
+    }
+
+    public KsmKeyInfo build() {
+      return new KsmKeyInfo(
+          volumeName, bucketName, keyName, ksmKeyLocationInfoGroups,
+          dataSize, creationTime, modificationTime);
+    }
+  }
+
+  public KeyInfo getProtobuf() {
+    long latestVersion = keyLocationVersions.size() == 0 ? -1 :
+        keyLocationVersions.get(keyLocationVersions.size() - 1).getVersion();
+    return KeyInfo.newBuilder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .setDataSize(dataSize)
+        .addAllKeyLocationList(keyLocationVersions.stream()
+            .map(KsmKeyLocationInfoGroup::getProtobuf)
+            .collect(Collectors.toList()))
+        .setLatestVersion(latestVersion)
+        .setCreationTime(creationTime)
+        .setModificationTime(modificationTime)
+        .build();
+  }
+
+  public static KsmKeyInfo getFromProtobuf(KeyInfo keyInfo) {
+    return new KsmKeyInfo(
+        keyInfo.getVolumeName(),
+        keyInfo.getBucketName(),
+        keyInfo.getKeyName(),
+        keyInfo.getKeyLocationListList().stream()
+            .map(KsmKeyLocationInfoGroup::getFromProtobuf)
+            .collect(Collectors.toList()),
+        keyInfo.getDataSize(),
+        keyInfo.getCreationTime(),
+        keyInfo.getModificationTime());
+  }
+
+}
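
A sketch (not part of the patch) of the two versioning operations above: appendNewBlocks adds blocks to the current latest version, while addNewVersion opens a new version at the tail of the list. The class name, container/block names and sizes are hypothetical.

import java.util.ArrayList;
import java.util.Collections;

import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;
import org.apache.hadoop.util.Time;

public class KsmKeyInfoVersionExample {
  public static void main(String[] args) throws Exception {
    KsmKeyLocationInfo block = new KsmKeyLocationInfo.Builder()
        .setContainerName("container-1")
        .setBlockID("block-1")
        .setShouldCreateContainer(true)
        .setLength(1024)
        .setOffset(0)
        .build();

    // Start with a single, empty version 0 so the constructor's
    // old-to-new ordering check passes.
    KsmKeyInfo keyInfo = new KsmKeyInfo.Builder()
        .setVolumeName("volume-1")
        .setBucketName("bucket-1")
        .setKeyName("key-1")
        .setKsmKeyLocationInfos(new ArrayList<>(Collections.singletonList(
            new KsmKeyLocationInfoGroup(0, new ArrayList<>()))))
        .setCreationTime(Time.now())
        .setModificationTime(Time.now())
        .build();

    keyInfo.appendNewBlocks(Collections.singletonList(block)); // stays in v0
    long latest = keyInfo.addNewVersion(new ArrayList<>());    // creates v1
    System.out.println("latest version = " + latest);          // prints 1
  }
}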
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfo.java
new file mode 100644
index 0000000..9d24b30
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfo.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyLocation;
+
+/**
+ * One key can be too large to fit in one container, in which case it is split
+ * into a number of subkeys. This class represents one such subkey instance.
+ */
+public final class KsmKeyLocationInfo {
+  private final String containerName;
+  // name of the block id SCM assigned for the key
+  private final String blockID;
+  private final boolean shouldCreateContainer;
+  // the length and offset of this subkey within the whole key's data
+  private final long length;
+  private final long offset;
+  // the version number indicating when this block was added
+  private long createVersion;
+
+  private KsmKeyLocationInfo(String containerName,
+      String blockID, boolean shouldCreateContainer,
+      long length, long offset) {
+    this.containerName = containerName;
+    this.blockID = blockID;
+    this.shouldCreateContainer = shouldCreateContainer;
+    this.length = length;
+    this.offset = offset;
+  }
+
+  public void setCreateVersion(long version) {
+    createVersion = version;
+  }
+
+  public long getCreateVersion() {
+    return createVersion;
+  }
+
+  public String getContainerName() {
+    return containerName;
+  }
+
+  public String getBlockID() {
+    return blockID;
+  }
+
+  public boolean getShouldCreateContainer() {
+    return shouldCreateContainer;
+  }
+
+  public long getLength() {
+    return length;
+  }
+
+  public long getOffset() {
+    return offset;
+  }
+
+  /**
+   * Builder of KsmKeyLocationInfo.
+   */
+  public static class Builder {
+    private String containerName;
+    private String blockID;
+    private boolean shouldCreateContainer;
+    private long length;
+    private long offset;
+
+    public Builder setContainerName(String container) {
+      this.containerName = container;
+      return this;
+    }
+
+    public Builder setBlockID(String block) {
+      this.blockID = block;
+      return this;
+    }
+
+    public Builder setShouldCreateContainer(boolean create) {
+      this.shouldCreateContainer = create;
+      return this;
+    }
+
+    public Builder setLength(long len) {
+      this.length = len;
+      return this;
+    }
+
+    public Builder setOffset(long off) {
+      this.offset = off;
+      return this;
+    }
+
+    public KsmKeyLocationInfo build() {
+      return new KsmKeyLocationInfo(containerName, blockID,
+          shouldCreateContainer, length, offset);
+    }
+  }
+
+  public KeyLocation getProtobuf() {
+    return KeyLocation.newBuilder()
+        .setContainerName(containerName)
+        .setBlockID(blockID)
+        .setShouldCreateContainer(shouldCreateContainer)
+        .setLength(length)
+        .setOffset(offset)
+        .setCreateVersion(createVersion)
+        .build();
+  }
+
+  public static KsmKeyLocationInfo getFromProtobuf(KeyLocation keyLocation) {
+    KsmKeyLocationInfo info = new KsmKeyLocationInfo(
+        keyLocation.getContainerName(),
+        keyLocation.getBlockID(),
+        keyLocation.getShouldCreateContainer(),
+        keyLocation.getLength(),
+        keyLocation.getOffset());
+    info.setCreateVersion(keyLocation.getCreateVersion());
+    return info;
+  }
+}
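
A minimal sketch (not part of the patch) of building a single subkey location and round-tripping it through the KeyLocation protobuf. createVersion is not settable through the builder; it is stamped afterwards by the version-management code. Class name and values are hypothetical.

import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyLocation;

public class KsmKeyLocationInfoExample {
  public static void main(String[] args) {
    KsmKeyLocationInfo location = new KsmKeyLocationInfo.Builder()
        .setContainerName("container-1")
        .setBlockID("block-1")
        .setShouldCreateContainer(false)
        .setLength(4096)
        .setOffset(0)
        .build();
    location.setCreateVersion(0);
    KeyLocation proto = location.getProtobuf();
    KsmKeyLocationInfo copy = KsmKeyLocationInfo.getFromProtobuf(proto);
    System.out.println(copy.getBlockID() + " @ version " + copy.getCreateVersion());
  }
}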
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfoGroup.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfoGroup.java
new file mode 100644
index 0000000..bef65ec
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmKeyLocationInfoGroup.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyLocationList;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * A list of key locations. This class represents one single version of the
+ * blocks of a key.
+ */
+public class KsmKeyLocationInfoGroup {
+  private final long version;
+  private final List<KsmKeyLocationInfo> locationList;
+
+  public KsmKeyLocationInfoGroup(long version,
+      List<KsmKeyLocationInfo> locations) {
+    this.version = version;
+    this.locationList = locations;
+  }
+
+  /**
+   * Return only the blocks that are created in the most recent version.
+   *
+   * @return the list of blocks that are created in the latest version.
+   */
+  public List<KsmKeyLocationInfo> getBlocksLatestVersionOnly() {
+    List<KsmKeyLocationInfo> list = new ArrayList<>();
+    locationList.stream().filter(x -> x.getCreateVersion() == version)
+        .forEach(list::add);
+    return list;
+  }
+
+  public long getVersion() {
+    return version;
+  }
+
+  public List<KsmKeyLocationInfo> getLocationList() {
+    return locationList;
+  }
+
+  public KeyLocationList getProtobuf() {
+    return KeyLocationList.newBuilder()
+        .setVersion(version)
+        .addAllKeyLocations(
+            locationList.stream().map(KsmKeyLocationInfo::getProtobuf)
+                .collect(Collectors.toList()))
+        .build();
+  }
+
+  public static KsmKeyLocationInfoGroup getFromProtobuf(
+      KeyLocationList keyLocationList) {
+    return new KsmKeyLocationInfoGroup(
+        keyLocationList.getVersion(),
+        keyLocationList.getKeyLocationsList().stream()
+            .map(KsmKeyLocationInfo::getFromProtobuf)
+            .collect(Collectors.toList()));
+  }
+
+  /**
+   * Given a list of new block locations, generates a new version group based
+   * upon this one.
+   *
+   * @param newLocationList the new locations to be added.
+   * @return a new KsmKeyLocationInfoGroup at version + 1, containing both the
+   *         existing and the newly added locations.
+   */
+  KsmKeyLocationInfoGroup generateNextVersion(
+      List<KsmKeyLocationInfo> newLocationList) throws IOException {
+    // TODO : revisit whether this method can be made more efficient.
+    // One potential inefficiency is that a later version always includes the
+    // older ones, e.g. if v1 has B1, then v2, v3, ... will all carry B1 and
+    // only add more blocks on top of it.
+    List<KsmKeyLocationInfo> newList = new ArrayList<>(locationList);
+    for (KsmKeyLocationInfo newInfo : newLocationList) {
+      // all these new blocks will have a createVersion of current version + 1
+      newInfo.setCreateVersion(version + 1);
+      newList.add(newInfo);
+    }
+    return new KsmKeyLocationInfoGroup(version + 1, newList);
+  }
+
+  void appendNewBlocks(List<KsmKeyLocationInfo> newLocationList)
+      throws IOException {
+    for (KsmKeyLocationInfo info : newLocationList) {
+      info.setCreateVersion(version);
+      locationList.add(info);
+    }
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("version:").append(version).append(" ");
+    for (KsmKeyLocationInfo kli : locationList) {
+      sb.append(kli.getBlockID()).append(" || ");
+    }
+    return sb.toString();
+  }
+}
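
To illustrate the versioning semantics described above, here is a small sketch from within the same package (generateNextVersion is package-private and throws IOException, which is left to the caller here); b1 and b2 stand for two KsmKeyLocationInfo instances built as shown earlier, with b1 assumed to have createVersion 0, and java.util imports assumed.

// version 0 holds only b1
KsmKeyLocationInfoGroup v0 = new KsmKeyLocationInfoGroup(
    0, new ArrayList<>(Collections.singletonList(b1)));

// generating the next version carries b1 forward and stamps b2 with version 1
KsmKeyLocationInfoGroup v1 = v0.generateNextVersion(
    Collections.singletonList(b2));

// v1.getLocationList()            -> [b1, b2]
// v1.getBlocksLatestVersionOnly() -> [b2]   (only createVersion == 1)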
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmOzoneAclMap.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmOzoneAclMap.java
new file mode 100644
index 0000000..7d9efad
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmOzoneAclMap.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo.OzoneAclRights;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo.OzoneAclType;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * This helper class keeps a map of all users and their permissions.
+ */
+public class KsmOzoneAclMap {
+  // per Acl Type user:rights map
+  private ArrayList<Map<String, OzoneAclRights>> aclMaps;
+
+  KsmOzoneAclMap() {
+    aclMaps = new ArrayList<>();
+    for (OzoneAclType aclType : OzoneAclType.values()) {
+      aclMaps.add(aclType.ordinal(), new HashMap<>());
+    }
+  }
+
+  private Map<String, OzoneAclRights> getMap(OzoneAclType type) {
+    return aclMaps.get(type.ordinal());
+  }
+
+  // For a given acl type and user, get the stored acl
+  private OzoneAclRights getAcl(OzoneAclType type, String user) {
+    return getMap(type).get(user);
+  }
+
+  // Add a new acl to the map
+  public void addAcl(OzoneAclInfo acl) {
+    getMap(acl.getType()).put(acl.getName(), acl.getRights());
+  }
+
+  // for a given acl, check if the user has access rights
+  public boolean hasAccess(OzoneAclInfo acl) {
+    OzoneAclRights storedRights = getAcl(acl.getType(), acl.getName());
+    if (storedRights != null) {
+      switch (acl.getRights()) {
+      case READ:
+        return (storedRights == OzoneAclRights.READ)
+            || (storedRights == OzoneAclRights.READ_WRITE);
+      case WRITE:
+        return (storedRights == OzoneAclRights.WRITE)
+            || (storedRights == OzoneAclRights.READ_WRITE);
+      case READ_WRITE:
+        return (storedRights == OzoneAclRights.READ_WRITE);
+      default:
+        return false;
+      }
+    } else {
+      return false;
+    }
+  }
+
+  // Convert this map to OzoneAclInfo Protobuf List
+  public List<OzoneAclInfo> ozoneAclGetProtobuf() {
+    List<OzoneAclInfo> aclList = new LinkedList<>();
+    for (OzoneAclType type: OzoneAclType.values()) {
+      for (Map.Entry<String, OzoneAclRights> entry :
+          aclMaps.get(type.ordinal()).entrySet()) {
+        OzoneAclInfo aclInfo = OzoneAclInfo.newBuilder()
+            .setName(entry.getKey())
+            .setType(type)
+            .setRights(entry.getValue())
+            .build();
+        aclList.add(aclInfo);
+      }
+    }
+
+    return aclList;
+  }
+
+  // Create map from list of OzoneAclInfos
+  public static KsmOzoneAclMap ozoneAclGetFromProtobuf(
+      List<OzoneAclInfo> aclList) {
+    KsmOzoneAclMap aclMap = new KsmOzoneAclMap();
+    for (OzoneAclInfo acl : aclList) {
+      aclMap.addAcl(acl);
+    }
+    return aclMap;
+  }
+}
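
A brief sketch of the access check above, written from within the same package since the constructor is package-private; the ACL type is taken from the enum rather than named, the user name is a placeholder, and the generated OzoneAclInfo builder is assumed to mirror the getters used above.

OzoneAclType type = OzoneAclType.values()[0];      // any acl type
KsmOzoneAclMap acls = new KsmOzoneAclMap();

acls.addAcl(OzoneAclInfo.newBuilder()
    .setType(type)
    .setName("alice")                               // placeholder user
    .setRights(OzoneAclRights.READ_WRITE)
    .build());

// READ_WRITE satisfies READ, WRITE and READ_WRITE checks for the same entry.
boolean canRead = acls.hasAccess(OzoneAclInfo.newBuilder()
    .setType(type)
    .setName("alice")
    .setRights(OzoneAclRights.READ)
    .build());                                      // true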
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmVolumeArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmVolumeArgs.java
new file mode 100644
index 0000000..6b42c27
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/KsmVolumeArgs.java
@@ -0,0 +1,223 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.VolumeInfo;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+
+/**
+ * A class that encapsulates volume arguments.
+ */
+public final class KsmVolumeArgs {
+  private final String adminName;
+  private final String ownerName;
+  private final String volume;
+  private final long creationTime;
+  private final long quotaInBytes;
+  private final Map<String, String> keyValueMap;
+  private final KsmOzoneAclMap aclMap;
+
+  /**
+   * Private constructor, constructed via builder.
+   * @param adminName  - Administrator's name.
+   * @param ownerName  - Volume owner's name
+   * @param volume - volume name
+   * @param quotaInBytes - Volume Quota in bytes.
+   * @param keyValueMap - keyValue map.
+   * @param aclMap - User to access rights map.
+   * @param creationTime - Volume creation time.
+   */
+  private KsmVolumeArgs(String adminName, String ownerName, String volume,
+                        long quotaInBytes, Map<String, String> keyValueMap,
+                        KsmOzoneAclMap aclMap, long creationTime) {
+    this.adminName = adminName;
+    this.ownerName = ownerName;
+    this.volume = volume;
+    this.quotaInBytes = quotaInBytes;
+    this.keyValueMap = keyValueMap;
+    this.aclMap = aclMap;
+    this.creationTime = creationTime;
+  }
+
+  /**
+   * Returns the Admin Name.
+   * @return String.
+   */
+  public String getAdminName() {
+    return adminName;
+  }
+
+  /**
+   * Returns the owner Name.
+   * @return String
+   */
+  public String getOwnerName() {
+    return ownerName;
+  }
+
+  /**
+   * Returns the volume Name.
+   * @return String
+   */
+  public String getVolume() {
+    return volume;
+  }
+
+  /**
+   * Returns creation time.
+   * @return long
+   */
+  public long getCreationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns Quota in Bytes.
+   * @return long, Quota in bytes.
+   */
+  public long getQuotaInBytes() {
+    return quotaInBytes;
+  }
+
+  public Map<String, String> getKeyValueMap() {
+    return keyValueMap;
+  }
+
+  public KsmOzoneAclMap getAclMap() {
+    return aclMap;
+  }
+  /**
+   * Returns new builder class that builds a KsmVolumeArgs.
+   *
+   * @return Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder for KsmVolumeArgs.
+   */
+  public static class Builder {
+    private String adminName;
+    private String ownerName;
+    private String volume;
+    private long creationTime;
+    private long quotaInBytes;
+    private Map<String, String> keyValueMap;
+    private KsmOzoneAclMap aclMap;
+
+    /**
+     * Constructs a builder.
+     */
+    Builder() {
+      keyValueMap = new HashMap<>();
+      aclMap = new KsmOzoneAclMap();
+    }
+
+    public Builder setAdminName(String admin) {
+      this.adminName = admin;
+      return this;
+    }
+
+    public Builder setOwnerName(String owner) {
+      this.ownerName = owner;
+      return this;
+    }
+
+    public Builder setVolume(String volumeName) {
+      this.volume = volumeName;
+      return this;
+    }
+
+    public Builder setCreationTime(long createdOn) {
+      this.creationTime = createdOn;
+      return this;
+    }
+
+    public Builder setQuotaInBytes(long quota) {
+      this.quotaInBytes = quota;
+      return this;
+    }
+
+    public Builder addMetadata(String key, String value) {
+      keyValueMap.put(key, value); // overwrite if present.
+      return this;
+    }
+
+    public Builder addOzoneAcls(OzoneAclInfo acl) throws IOException {
+      aclMap.addAcl(acl);
+      return this;
+    }
+
+    /**
+     * Constructs a KsmVolumeArgs instance.
+     * @return KsmVolumeArgs.
+     */
+    public KsmVolumeArgs build() {
+      Preconditions.checkNotNull(adminName);
+      Preconditions.checkNotNull(ownerName);
+      Preconditions.checkNotNull(volume);
+      return new KsmVolumeArgs(adminName, ownerName, volume, quotaInBytes,
+          keyValueMap, aclMap, creationTime);
+    }
+  }
+
+  public VolumeInfo getProtobuf() {
+    List<KeyValue> metadataList = new LinkedList<>();
+    for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
+      metadataList.add(KeyValue.newBuilder().setKey(entry.getKey()).
+          setValue(entry.getValue()).build());
+    }
+    List<OzoneAclInfo> aclList = aclMap.ozoneAclGetProtobuf();
+
+    return VolumeInfo.newBuilder()
+        .setAdminName(adminName)
+        .setOwnerName(ownerName)
+        .setVolume(volume)
+        .setQuotaInBytes(quotaInBytes)
+        .addAllMetadata(metadataList)
+        .addAllVolumeAcls(aclList)
+        .setCreationTime(creationTime)
+        .build();
+  }
+
+  public static KsmVolumeArgs getFromProtobuf(VolumeInfo volInfo) {
+    Map<String, String> kvMap = volInfo.getMetadataList().stream()
+        .collect(Collectors.toMap(KeyValue::getKey,
+            KeyValue::getValue));
+    KsmOzoneAclMap aclMap =
+        KsmOzoneAclMap.ozoneAclGetFromProtobuf(volInfo.getVolumeAclsList());
+
+    return new KsmVolumeArgs(volInfo.getAdminName(), volInfo.getOwnerName(),
+        volInfo.getVolume(), volInfo.getQuotaInBytes(), kvMap, aclMap,
+        volInfo.getCreationTime());
+  }
+}
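
A short sketch of the builder and protobuf round trip defined above; all names, the quota and the metadata entry are placeholders.

KsmVolumeArgs volume = KsmVolumeArgs.newBuilder()
    .setAdminName("hdfs")                       // placeholder admin principal
    .setOwnerName("alice")                      // placeholder owner
    .setVolume("volume-one")                    // placeholder volume name
    .setQuotaInBytes(1024L * 1024 * 1024)       // 1 GB quota
    .setCreationTime(System.currentTimeMillis())
    .addMetadata("purpose", "testing")          // overwritten if key repeats
    .build();

VolumeInfo proto = volume.getProtobuf();
KsmVolumeArgs copy = KsmVolumeArgs.getFromProtobuf(proto);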
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/OpenKeySession.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/OpenKeySession.java
new file mode 100644
index 0000000..c19c04b
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/OpenKeySession.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+/**
+ * This class represents an open key "session". A session here means a key
+ * that has been opened by a specific client; the client passes this handle
+ * back to the server so that the server can recognize the client and knows
+ * how to close the key.
+ */
+public class OpenKeySession {
+  private final int id;
+  private final KsmKeyInfo keyInfo;
+  // the version of the key when it is being opened in this session.
+  // a block whose create version equals the open version will be committed
+  // only when this open session is closed.
+  private long openVersion;
+
+  public OpenKeySession(int id, KsmKeyInfo info, long version) {
+    this.id = id;
+    this.keyInfo = info;
+    this.openVersion = version;
+  }
+
+  public long getOpenVersion() {
+    return this.openVersion;
+  }
+
+  public KsmKeyInfo getKeyInfo() {
+    return keyInfo;
+  }
+
+  public int getId() {
+    return id;
+  }
+}
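
A tiny sketch of how the open version relates to block create versions; keyInfo stands for a KsmKeyInfo obtained from the key manager, and the client id and version are placeholders.

OpenKeySession session = new OpenKeySession(42, keyInfo, 3L);
long openVersion = session.getOpenVersion();    // 3
// per the class comment, a block whose createVersion equals this open version
// is committed only when this open session is closed.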
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/ServiceInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/ServiceInfo.java
new file mode 100644
index 0000000..e07232d
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/ServiceInfo.java
@@ -0,0 +1,237 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm.helpers;
+
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos
+    .ServicePort;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * ServiceInfo holds the config details of Ozone services.
+ */
+public final class ServiceInfo {
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(ServiceInfo.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
+  /**
+   * Type of node/service.
+   */
+  private NodeType nodeType;
+  /**
+   * Hostname of the node in which the service is running.
+   */
+  private String hostname;
+
+  /**
+   * List of ports the service listens to.
+   */
+  private Map<ServicePort.Type, Integer> ports;
+
+  /**
+   * Default constructor for JSON deserialization.
+   */
+  public ServiceInfo() {}
+
+  /**
+   * Constructs the ServiceInfo for the {@code nodeType}.
+   * @param nodeType type of node/service
+   * @param hostname hostname of the service
+   * @param portList list of ports the service listens to
+   */
+  private ServiceInfo(
+      NodeType nodeType, String hostname, List<ServicePort> portList) {
+    Preconditions.checkNotNull(nodeType);
+    Preconditions.checkNotNull(hostname);
+    this.nodeType = nodeType;
+    this.hostname = hostname;
+    this.ports = new HashMap<>();
+    for (ServicePort port : portList) {
+      ports.put(port.getType(), port.getValue());
+    }
+  }
+
+  /**
+   * Returns the type of node/service.
+   * @return node type
+   */
+  public NodeType getNodeType() {
+    return nodeType;
+  }
+
+  /**
+   * Returns the hostname of the service.
+   * @return hostname
+   */
+  public String getHostname() {
+    return hostname;
+  }
+
+  /**
+   * Returns ServicePort.Type to port mappings.
+   * @return ports
+   */
+  public Map<ServicePort.Type, Integer> getPorts() {
+    return ports;
+  }
+
+  /**
+   * Returns the port of the given type. The requested type should be one of
+   * the ports this service actually listens to, since the return value is a
+   * primitive int.
+   *
+   * @param type the type of port, e.g. RPC, HTTP, HTTPS.
+   * @return the port number for the given type.
+   */
+  @JsonIgnore
+  public int getPort(ServicePort.Type type) {
+    return ports.get(type);
+  }
+
+  /**
+   * Converts {@link ServiceInfo} to KeySpaceManagerProtocolProtos.ServiceInfo.
+   *
+   * @return KeySpaceManagerProtocolProtos.ServiceInfo
+   */
+  @JsonIgnore
+  public KeySpaceManagerProtocolProtos.ServiceInfo getProtobuf() {
+    KeySpaceManagerProtocolProtos.ServiceInfo.Builder builder =
+        KeySpaceManagerProtocolProtos.ServiceInfo.newBuilder();
+    builder.setNodeType(nodeType)
+        .setHostname(hostname)
+        .addAllServicePorts(
+            ports.entrySet().stream()
+                .map(
+                    entry ->
+                        ServicePort.newBuilder()
+                            .setType(entry.getKey())
+                            .setValue(entry.getValue()).build())
+                .collect(Collectors.toList()));
+    return builder.build();
+  }
+
+  /**
+   * Converts KeySpaceManagerProtocolProtos.ServiceInfo to {@link ServiceInfo}.
+   *
+   * @return {@link ServiceInfo}
+   */
+  @JsonIgnore
+  public static ServiceInfo getFromProtobuf(
+      KeySpaceManagerProtocolProtos.ServiceInfo serviceInfo) {
+    return new ServiceInfo(serviceInfo.getNodeType(),
+        serviceInfo.getHostname(),
+        serviceInfo.getServicePortsList());
+  }
+
+  /**
+   * Returns a JSON string of this object.
+   *
+   * @return String - json string
+   * @throws IOException
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * Parses a JSON string into a ServiceInfo object.
+   *
+   * @param jsonString Json String
+   * @return ServiceInfo
+   * @throws IOException
+   */
+  public static ServiceInfo parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+  /**
+   * Creates a new builder to build {@link ServiceInfo}.
+   * @return {@link ServiceInfo.Builder}
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder used to build/construct {@link ServiceInfo}.
+   */
+  public static class Builder {
+
+    private NodeType node;
+    private String host;
+    private List<ServicePort> portList = new ArrayList<>();
+
+
+    /**
+     * Sets the node/service type.
+     * @param nodeType type of node
+     * @return the builder
+     */
+    public Builder setNodeType(NodeType nodeType) {
+      node = nodeType;
+      return this;
+    }
+
+    /**
+     * Sets the hostname of the service.
+     * @param hostname service hostname
+     * @return the builder
+     */
+    public Builder setHostname(String hostname) {
+      host = hostname;
+      return this;
+    }
+
+    /**
+     * Adds the service port to the service port list.
+     * @param servicePort RPC port
+     * @return the builder
+     */
+    public Builder addServicePort(ServicePort servicePort) {
+      portList.add(servicePort);
+      return this;
+    }
+
+
+    /**
+     * Builds and returns {@link ServiceInfo} with the set values.
+     * @return {@link ServiceInfo}
+     */
+    public ServiceInfo build() {
+      return new ServiceInfo(node, host, portList);
+    }
+  }
+
+}
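
A sketch of assembling and serializing a ServiceInfo; the NodeType and ServicePort.Type constants used here (KSM, RPC), the hostname and the port number are assumptions for illustration only.

ServiceInfo info = ServiceInfo.newBuilder()
    .setNodeType(NodeType.KSM)                  // assumed node type constant
    .setHostname("ksm.example.com")             // placeholder hostname
    .addServicePort(ServicePort.newBuilder()
        .setType(ServicePort.Type.RPC)          // assumed port type constant
        .setValue(9862)                         // placeholder port number
        .build())
    .build();

String json = info.toJsonString();              // pretty-printed JSON
int rpcPort = info.getPort(ServicePort.Type.RPC);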
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/VolumeArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/VolumeArgs.java
new file mode 100644
index 0000000..1a3d486
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/VolumeArgs.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
+
+import com.google.common.base.Preconditions;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A class that encapsulates the createVolume Args.
+ */
+public final class VolumeArgs {
+  private final String adminName;
+  private final String ownerName;
+  private final String volume;
+  private final long quotaInBytes;
+  private final Map<String, String> extendedAttributes;
+
+  /**
+   * Private constructor, constructed via builder.
+   *
+   * @param adminName - Administrator name.
+   * @param ownerName - Volume owner's name
+   * @param volume - volume name
+   * @param quotaInBytes - Volume Quota in bytes.
+   * @param keyValueMap - keyValue map.
+   */
+  private VolumeArgs(String adminName, String ownerName, String volume,
+      long quotaInBytes, Map<String, String> keyValueMap) {
+    this.adminName = adminName;
+    this.ownerName = ownerName;
+    this.volume = volume;
+    this.quotaInBytes = quotaInBytes;
+    this.extendedAttributes = keyValueMap;
+  }
+
+  /**
+   * Returns the Admin Name.
+   *
+   * @return String.
+   */
+  public String getAdminName() {
+    return adminName;
+  }
+
+  /**
+   * Returns the owner Name.
+   *
+   * @return String
+   */
+  public String getOwnerName() {
+    return ownerName;
+  }
+
+  /**
+   * Returns the volume Name.
+   *
+   * @return String
+   */
+  public String getVolume() {
+    return volume;
+  }
+
+  /**
+   * Returns Quota in Bytes.
+   *
+   * @return long, Quota in bytes.
+   */
+  public long getQuotaInBytes() {
+    return quotaInBytes;
+  }
+
+  public Map<String, String> getExtendedAttributes() {
+    return extendedAttributes;
+  }
+
+  static class Builder {
+    private String adminName;
+    private String ownerName;
+    private String volume;
+    private long quotaInBytes;
+    private Map<String, String> extendedAttributes;
+
+    /**
+     * Constructs a builder.
+     */
+    Builder() {
+      extendedAttributes = new HashMap<>();
+    }
+
+    public void setAdminName(String adminName) {
+      this.adminName = adminName;
+    }
+
+    public void setOwnerName(String ownerName) {
+      this.ownerName = ownerName;
+    }
+
+    public void setVolume(String volume) {
+      this.volume = volume;
+    }
+
+    public void setQuotaInBytes(long quotaInBytes) {
+      this.quotaInBytes = quotaInBytes;
+    }
+
+    public void addMetadata(String key, String value) {
+      extendedAttributes.put(key, value); // overwrite if present.
+    }
+
+    /**
+     * Constructs a VolumeArgs instance.
+     *
+     * @return VolumeArgs.
+     */
+    public VolumeArgs build() {
+      Preconditions.checkNotNull(adminName);
+      Preconditions.checkNotNull(ownerName);
+      Preconditions.checkNotNull(volume);
+      return new VolumeArgs(adminName, ownerName, volume, quotaInBytes,
+          extendedAttributes);
+    }
+  }
+}
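
Unlike KsmVolumeArgs.Builder, this Builder is package-private and its setters return void, so a sketch (from within the helpers package, with placeholder values) looks like:

VolumeArgs.Builder builder = new VolumeArgs.Builder();
builder.setAdminName("hdfs");                   // placeholder admin
builder.setOwnerName("alice");                  // placeholder owner
builder.setVolume("volume-one");                // placeholder volume name
builder.setQuotaInBytes(1024L * 1024 * 1024);   // 1 GB quota
builder.addMetadata("purpose", "testing");
VolumeArgs args = builder.build();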
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/package-info.java
new file mode 100644
index 0000000..ce627a5
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/helpers/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.helpers;
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java
new file mode 100644
index 0000000..7698ee1
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+/**
+ This package contains the client-side protocol library used to communicate
+ with the KSM.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/KeySpaceManagerProtocol.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/KeySpaceManagerProtocol.java
new file mode 100644
index 0000000..5da5a27
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/KeySpaceManagerProtocol.java
@@ -0,0 +1,245 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.protocol;
+
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Protocol to talk to KSM.
+ */
+public interface KeySpaceManagerProtocol {
+
+  /**
+   * Creates a volume.
+   * @param args - Arguments to create Volume.
+   * @throws IOException
+   */
+  void createVolume(KsmVolumeArgs args) throws IOException;
+
+  /**
+   * Changes the owner of a volume.
+   * @param volume  - Name of the volume.
+   * @param owner - Name of the owner.
+   * @throws IOException
+   */
+  void setOwner(String volume, String owner) throws IOException;
+
+  /**
+   * Changes the Quota on a volume.
+   * @param volume - Name of the volume.
+   * @param quota - Quota in bytes.
+   * @throws IOException
+   */
+  void setQuota(String volume, long quota) throws IOException;
+
+  /**
+   * Checks if the specified user can access this volume.
+   * @param volume - volume
+   * @param userAcl - user acls which needs to be checked for access
+   * @return true if the user has required access for the volume,
+   *         false otherwise
+   * @throws IOException
+   */
+  boolean checkVolumeAccess(String volume, OzoneAclInfo userAcl)
+      throws IOException;
+
+  /**
+   * Gets the volume information.
+   * @param volume - Volume name.
+   * @return VolumeArgs or exception is thrown.
+   * @throws IOException
+   */
+  KsmVolumeArgs getVolumeInfo(String volume) throws IOException;
+
+  /**
+   * Deletes an existing empty volume.
+   * @param volume - Name of the volume.
+   * @throws IOException
+   */
+  void deleteVolume(String volume) throws IOException;
+
+  /**
+   * Lists volumes owned by a specific user.
+   * @param userName - user name
+   * @param prefix  - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- Listing starts from the entry after this key.
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  List<KsmVolumeArgs> listVolumeByUser(String userName, String prefix, String
+      prevKey, int maxKeys) throws IOException;
+
+  /**
+   * Lists all volumes in the cluster.
+   * @param prefix  - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- Listing starts from the entry after this key.
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  List<KsmVolumeArgs> listAllVolumes(String prefix, String
+      prevKey, int maxKeys) throws IOException;
+
+  /**
+   * Creates a bucket.
+   * @param bucketInfo - BucketInfo to create Bucket.
+   * @throws IOException
+   */
+  void createBucket(KsmBucketInfo bucketInfo) throws IOException;
+
+  /**
+   * Gets the bucket information.
+   * @param volumeName - Volume name.
+   * @param bucketName - Bucket name.
+   * @return KsmBucketInfo or exception is thrown.
+   * @throws IOException
+   */
+  KsmBucketInfo getBucketInfo(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Sets bucket property from args.
+   * @param args - BucketArgs.
+   * @throws IOException
+   */
+  void setBucketProperty(KsmBucketArgs args) throws IOException;
+
+  /**
+   * Open the given key and return an open key session.
+   *
+   * @param args the args of the key.
+   * @return OpenKeySession instance that the client uses to talk to the container.
+   * @throws IOException
+   */
+  OpenKeySession openKey(KsmKeyArgs args) throws IOException;
+
+  /**
+   * Commits a key. This makes the changes written by the client visible.
+   * The client is identified by the clientID.
+   *
+   * @param args the key to commit
+   * @param clientID the client identification
+   * @throws IOException
+   */
+  void commitKey(KsmKeyArgs args, int clientID) throws IOException;
+
+  /**
+   * Allocates a new block. The client is assumed to have an open key session
+   * in progress; the new block will be appended to that open key session.
+   *
+   * @param args the key to append
+   * @param clientID the client identification
+   * @return an allocated block
+   * @throws IOException
+   */
+  KsmKeyLocationInfo allocateBlock(KsmKeyArgs args, int clientID)
+      throws IOException;
+
+  /**
+   * Looks up the container of an existing key.
+   *
+   * @param args the args of the key.
+   * @return KsmKeyInfo instance that the client uses to talk to the container.
+   * @throws IOException
+   */
+  KsmKeyInfo lookupKey(KsmKeyArgs args) throws IOException;
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param args the args of the key.
+   * @throws IOException
+   */
+  void deleteKey(KsmKeyArgs args) throws IOException;
+
+  /**
+   * Deletes an existing empty bucket from volume.
+   * @param volume - Name of the volume.
+   * @param bucket - Name of the bucket.
+   * @throws IOException
+   */
+  void deleteBucket(String volume, String bucket) throws IOException;
+
+  /**
+   * Returns a list of buckets represented by {@link KsmBucketInfo}
+   * in the given volume. Argument volumeName is required, others
+   * are optional.
+   *
+   * @param volumeName
+   *   the name of the volume.
+   * @param startBucketName
+   *   the start bucket name, only the buckets whose name is
+   *   after this value will be included in the result.
+   * @param bucketPrefix
+   *   bucket name prefix, only the buckets whose name has
+   *   this prefix will be included in the result.
+   * @param maxNumOfBuckets
+   *   the maximum number of buckets to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of buckets.
+   * @throws IOException
+   */
+  List<KsmBucketInfo> listBuckets(String volumeName,
+      String startBucketName, String bucketPrefix, int maxNumOfBuckets)
+      throws IOException;
+
+  /**
+   * Returns a list of keys represented by {@link KsmKeyInfo}
+   * in the given bucket. Arguments volumeName and bucketName are required,
+   * the others are optional.
+   *
+   * @param volumeName
+   *   the name of the volume.
+   * @param bucketName
+   *   the name of the bucket.
+   * @param startKeyName
+   *   the start key name, only the keys whose name is
+   *   after this value will be included in the result.
+   * @param keyPrefix
+   *   key name prefix, only the keys whose name has
+   *   this prefix will be included in the result.
+   * @param maxKeys
+   *   the maximum number of keys to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of keys.
+   * @throws IOException
+   */
+  List<KsmKeyInfo> listKeys(String volumeName,
+      String bucketName, String startKeyName, String keyPrefix, int maxKeys)
+      throws IOException;
+
+  /**
+   * Returns the list of Ozone services with their configuration details.
+   *
+   * @return list of Ozone services
+   * @throws IOException
+   */
+  List<ServiceInfo> getServiceList() throws IOException;
+}
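
To tie the key-related calls above together, here is a hedged sketch of the write path against any KeySpaceManagerProtocol implementation; ksm and keyArgs (a KsmKeyArgs built elsewhere) are assumed to exist, and IOException handling is left to the caller.

// 1. open the key; the returned session identifies this client.
OpenKeySession session = ksm.openKey(keyArgs);

// 2. ask for additional blocks as needed; each block is appended to the
//    open session (see OpenKeySession above).
KsmKeyLocationInfo nextBlock = ksm.allocateBlock(keyArgs, session.getId());

// 3. once the data has been written through the container layer, commit the
//    key to make it visible to readers.
ksm.commitKey(keyArgs, session.getId());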
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/package-info.java
new file mode 100644
index 0000000..f77e5fd
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocol/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm.protocol;
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolClientSideTranslatorPB.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolClientSideTranslatorPB.java
new file mode 100644
index 0000000..cc215cf
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolClientSideTranslatorPB.java
@@ -0,0 +1,744 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.protocolPB;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtocolTranslator;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.ksm.protocol.KeySpaceManagerProtocol;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.AllocateBlockRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.AllocateBlockResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CommitKeyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CommitKeyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.BucketArgs;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.BucketInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetBucketPropertyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetBucketPropertyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.LocateKeyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.LocateKeyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.KeyArgs;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetVolumePropertyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetVolumePropertyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CheckVolumeAccessRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CheckVolumeAccessResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListBucketsRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListBucketsResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListKeysRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListKeysResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.VolumeInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.Status;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServiceListRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServiceListResponse;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.stream.Collectors;
+
+/**
+ *  The client side implementation of KeySpaceManagerProtocol.
+ */
+
+@InterfaceAudience.Private
+public final class KeySpaceManagerProtocolClientSideTranslatorPB
+    implements KeySpaceManagerProtocol, ProtocolTranslator, Closeable {
+
+  /**
+   * RpcController is not used and hence is set to null.
+   */
+  private static final RpcController NULL_RPC_CONTROLLER = null;
+
+  private final KeySpaceManagerProtocolPB rpcProxy;
+
+  /**
+   * Constructor for the KeySpaceManager client.
+   * @param rpcProxy the KeySpaceManagerProtocolPB RPC proxy to delegate calls to.
+   */
+  public KeySpaceManagerProtocolClientSideTranslatorPB(
+      KeySpaceManagerProtocolPB rpcProxy) {
+    this.rpcProxy = rpcProxy;
+  }
+
+  /**
+   * Closes this stream and releases any system resources associated
+   * with it. If the stream is already closed then invoking this
+   * method has no effect.
+   * <p>
+   * <p> As noted in {@link AutoCloseable#close()}, cases where the
+   * close may fail require careful attention. It is strongly advised
+   * to relinquish the underlying resources and to internally
+   * <em>mark</em> the {@code Closeable} as closed, prior to throwing
+   * the {@code IOException}.
+   *
+   * @throws IOException if an I/O error occurs
+   */
+  @Override
+  public void close() throws IOException {
+
+  }
+
+  /**
+   * Creates a volume.
+   *
+   * @param args - Arguments to create Volume.
+   * @throws IOException
+   */
+  @Override
+  public void createVolume(KsmVolumeArgs args) throws IOException {
+    CreateVolumeRequest.Builder req =
+        CreateVolumeRequest.newBuilder();
+    VolumeInfo volumeInfo = args.getProtobuf();
+    req.setVolumeInfo(volumeInfo);
+
+    final CreateVolumeResponse resp;
+    try {
+      resp = rpcProxy.createVolume(NULL_RPC_CONTROLLER,
+          req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() != Status.OK) {
+      throw new
+          IOException("Volume creation failed, error:" + resp.getStatus());
+    }
+  }
+
+  /**
+   * Changes the owner of a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param owner - Name of the owner.
+   * @throws IOException
+   */
+  @Override
+  public void setOwner(String volume, String owner) throws IOException {
+    SetVolumePropertyRequest.Builder req =
+        SetVolumePropertyRequest.newBuilder();
+    req.setVolumeName(volume).setOwnerName(owner);
+    final SetVolumePropertyResponse resp;
+    try {
+      resp = rpcProxy.setVolumeProperty(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new
+          IOException("Volume owner change failed, error:" + resp.getStatus());
+    }
+  }
+
+  /**
+   * Changes the Quota on a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param quota - Quota in bytes.
+   * @throws IOException
+   */
+  @Override
+  public void setQuota(String volume, long quota) throws IOException {
+    SetVolumePropertyRequest.Builder req =
+        SetVolumePropertyRequest.newBuilder();
+    req.setVolumeName(volume).setQuotaInBytes(quota);
+    final SetVolumePropertyResponse resp;
+    try {
+      resp = rpcProxy.setVolumeProperty(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new
+          IOException("Volume quota change failed, error:" + resp.getStatus());
+    }
+  }
+
+  /**
+   * Checks if the specified user can access this volume.
+   *
+   * @param volume - volume
+   * @param userAcl - user acls which needs to be checked for access
+   * @return true if the user has required access for the volume,
+   *         false otherwise
+   * @throws IOException
+   */
+  @Override
+  public boolean checkVolumeAccess(String volume, OzoneAclInfo userAcl) throws
+      IOException {
+    CheckVolumeAccessRequest.Builder req =
+        CheckVolumeAccessRequest.newBuilder();
+    req.setVolumeName(volume).setUserAcl(userAcl);
+    final CheckVolumeAccessResponse resp;
+    try {
+      resp = rpcProxy.checkVolumeAccess(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() == Status.ACCESS_DENIED) {
+      return false;
+    } else if (resp.getStatus() == Status.OK) {
+      return true;
+    } else {
+      throw new
+          IOException("Check Volume Access failed, error:" + resp.getStatus());
+    }
+  }
+
+  /**
+   * Gets the volume information.
+   *
+   * @param volume - Volume name.
+   * @return KsmVolumeArgs or exception is thrown.
+   * @throws IOException
+   */
+  @Override
+  public KsmVolumeArgs getVolumeInfo(String volume) throws IOException {
+    InfoVolumeRequest.Builder req = InfoVolumeRequest.newBuilder();
+    req.setVolumeName(volume);
+    final InfoVolumeResponse resp;
+    try {
+      resp = rpcProxy.infoVolume(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new
+          IOException("Info Volume failed, error:" + resp.getStatus());
+    }
+    return KsmVolumeArgs.getFromProtobuf(resp.getVolumeInfo());
+  }
+
+  /**
+   * Deletes an existing empty volume.
+   *
+   * @param volume - Name of the volume.
+   * @throws IOException
+   */
+  @Override
+  public void deleteVolume(String volume) throws IOException {
+    DeleteVolumeRequest.Builder req = DeleteVolumeRequest.newBuilder();
+    req.setVolumeName(volume);
+    final DeleteVolumeResponse resp;
+    try {
+      resp = rpcProxy.deleteVolume(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new
+          IOException("Delete Volume failed, error:" + resp.getStatus());
+    }
+  }
+
+  /**
+   * Lists volume owned by a specific user.
+   *
+   * @param userName - user name
+   * @param prefix - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- List starts from the next from the
+   * prevkey
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  @Override
+  public List<KsmVolumeArgs> listVolumeByUser(String userName, String prefix,
+                                              String prevKey, int maxKeys)
+      throws IOException {
+    ListVolumeRequest.Builder builder = ListVolumeRequest.newBuilder();
+    if (!Strings.isNullOrEmpty(prefix)) {
+      builder.setPrefix(prefix);
+    }
+    if (!Strings.isNullOrEmpty(prevKey)) {
+      builder.setPrevKey(prevKey);
+    }
+    builder.setMaxKeys(maxKeys);
+    builder.setUserName(userName);
+    builder.setScope(ListVolumeRequest.Scope.VOLUMES_BY_USER);
+    return listVolume(builder.build());
+  }
+
+  /**
+   * Lists volume all volumes in the cluster.
+   *
+   * @param prefix - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- List starts from the next from the
+   * prevkey
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  @Override
+  public List<KsmVolumeArgs> listAllVolumes(String prefix, String prevKey,
+      int maxKeys) throws IOException {
+    ListVolumeRequest.Builder builder = ListVolumeRequest.newBuilder();
+    if (!Strings.isNullOrEmpty(prefix)) {
+      builder.setPrefix(prefix);
+    }
+    if (!Strings.isNullOrEmpty(prevKey)) {
+      builder.setPrevKey(prevKey);
+    }
+    builder.setMaxKeys(maxKeys);
+    builder.setScope(ListVolumeRequest.Scope.VOLUMES_BY_CLUSTER);
+    return listVolume(builder.build());
+  }
+
+  private List<KsmVolumeArgs> listVolume(ListVolumeRequest request)
+      throws IOException {
+    final ListVolumeResponse resp;
+    try {
+      resp = rpcProxy.listVolumes(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("List volume failed, error: "
+          + resp.getStatus());
+    }
+
+    List<KsmVolumeArgs> result = Lists.newArrayList();
+    for (VolumeInfo volInfo : resp.getVolumeInfoList()) {
+      result.add(KsmVolumeArgs.getFromProtobuf(volInfo));
+    }
+    return result;
+  }
+
+  /**
+   * Creates a bucket.
+   *
+   * @param bucketInfo - BucketInfo to create bucket.
+   * @throws IOException
+   */
+  @Override
+  public void createBucket(KsmBucketInfo bucketInfo) throws IOException {
+    CreateBucketRequest.Builder req =
+        CreateBucketRequest.newBuilder();
+    BucketInfo bucketInfoProtobuf = bucketInfo.getProtobuf();
+    req.setBucketInfo(bucketInfoProtobuf);
+
+    final CreateBucketResponse resp;
+    try {
+      resp = rpcProxy.createBucket(NULL_RPC_CONTROLLER,
+          req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Bucket creation failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * Gets the bucket information.
+   *
+   * @param volume - Volume name.
+   * @param bucket - Bucket name.
+   * @return KsmBucketInfo or exception is thrown.
+   * @throws IOException
+   */
+  @Override
+  public KsmBucketInfo getBucketInfo(String volume, String bucket)
+      throws IOException {
+    InfoBucketRequest.Builder req =
+        InfoBucketRequest.newBuilder();
+    req.setVolumeName(volume);
+    req.setBucketName(bucket);
+
+    final InfoBucketResponse resp;
+    try {
+      resp = rpcProxy.infoBucket(NULL_RPC_CONTROLLER,
+          req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() == Status.OK) {
+      return KsmBucketInfo.getFromProtobuf(resp.getBucketInfo());
+    } else {
+      throw new IOException("Info Bucket failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * Sets bucket property from args.
+   * @param args - BucketArgs.
+   * @throws IOException
+   */
+  @Override
+  public void setBucketProperty(KsmBucketArgs args)
+      throws IOException {
+    SetBucketPropertyRequest.Builder req =
+        SetBucketPropertyRequest.newBuilder();
+    BucketArgs bucketArgs = args.getProtobuf();
+    req.setBucketArgs(bucketArgs);
+    final SetBucketPropertyResponse resp;
+    try {
+      resp = rpcProxy.setBucketProperty(NULL_RPC_CONTROLLER,
+          req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Setting bucket property failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * List buckets in a volume.
+   *
+   * @param volumeName - Name of the volume.
+   * @param startKey - Listing starts from the bucket after this key.
+   * @param prefix - Return only buckets that match this prefix.
+   * @param count - Max number of buckets to return.
+   * @return List of buckets.
+   * @throws IOException
+   */
+  @Override
+  public List<KsmBucketInfo> listBuckets(String volumeName,
+      String startKey, String prefix, int count) throws IOException {
+    List<KsmBucketInfo> buckets = new ArrayList<>();
+    ListBucketsRequest.Builder reqBuilder = ListBucketsRequest.newBuilder();
+    reqBuilder.setVolumeName(volumeName);
+    reqBuilder.setCount(count);
+    if (startKey != null) {
+      reqBuilder.setStartKey(startKey);
+    }
+    if (prefix != null) {
+      reqBuilder.setPrefix(prefix);
+    }
+    ListBucketsRequest request = reqBuilder.build();
+    final ListBucketsResponse resp;
+    try {
+      resp = rpcProxy.listBuckets(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() == Status.OK) {
+      buckets.addAll(
+          resp.getBucketInfoList().stream()
+              .map(KsmBucketInfo::getFromProtobuf)
+              .collect(Collectors.toList()));
+      return buckets;
+    } else {
+      throw new IOException("List Buckets failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * Creates a new open session for the key; the returned meta info is then
+   * used to talk to the data nodes to actually write the key data.
+   * @param args the args for the key to be allocated
+   * @return an open session handle for the key
+   * @throws IOException
+   */
+  @Override
+  public OpenKeySession openKey(KsmKeyArgs args) throws IOException {
+    LocateKeyRequest.Builder req = LocateKeyRequest.newBuilder();
+    KeyArgs.Builder keyArgs = KeyArgs.newBuilder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setFactor(args.getFactor())
+        .setType(args.getType())
+        .setKeyName(args.getKeyName());
+    if (args.getDataSize() > 0) {
+      keyArgs.setDataSize(args.getDataSize());
+    }
+    req.setKeyArgs(keyArgs.build());
+
+    final LocateKeyResponse resp;
+    try {
+      resp = rpcProxy.createKey(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Create key failed, error:" + resp.getStatus());
+    }
+    return new OpenKeySession(resp.getID(),
+        KsmKeyInfo.getFromProtobuf(resp.getKeyInfo()), resp.getOpenVersion());
+  }
+
+  @Override
+  public KsmKeyLocationInfo allocateBlock(KsmKeyArgs args, int clientID)
+      throws IOException {
+    AllocateBlockRequest.Builder req = AllocateBlockRequest.newBuilder();
+    KeyArgs keyArgs = KeyArgs.newBuilder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .setFactor(args.getFactor())
+        .setType(args.getType())
+        .setDataSize(args.getDataSize()).build();
+    req.setKeyArgs(keyArgs);
+    req.setClientID(clientID);
+
+    final AllocateBlockResponse resp;
+    try {
+      resp = rpcProxy.allocateBlock(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Allocate block failed, error:" +
+          resp.getStatus());
+    }
+    return KsmKeyLocationInfo.getFromProtobuf(resp.getKeyLocation());
+  }
+
+  @Override
+  public void commitKey(KsmKeyArgs args, int clientID)
+      throws IOException {
+    CommitKeyRequest.Builder req = CommitKeyRequest.newBuilder();
+    KeyArgs keyArgs = KeyArgs.newBuilder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .setDataSize(args.getDataSize()).build();
+    req.setKeyArgs(keyArgs);
+    req.setClientID(clientID);
+
+    final CommitKeyResponse resp;
+    try {
+      resp = rpcProxy.commitKey(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Commit key failed, error:" +
+          resp.getStatus());
+    }
+  }
+
+
+  @Override
+  public KsmKeyInfo lookupKey(KsmKeyArgs args) throws IOException {
+    LocateKeyRequest.Builder req = LocateKeyRequest.newBuilder();
+    KeyArgs keyArgs = KeyArgs.newBuilder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .setDataSize(args.getDataSize()).build();
+    req.setKeyArgs(keyArgs);
+
+    final LocateKeyResponse resp;
+    try {
+      resp = rpcProxy.lookupKey(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Lookup key failed, error:" +
+          resp.getStatus());
+    }
+    return KsmKeyInfo.getFromProtobuf(resp.getKeyInfo());
+  }
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param args the args of the key.
+   * @throws IOException
+   */
+  @Override
+  public void deleteKey(KsmKeyArgs args) throws IOException {
+    LocateKeyRequest.Builder req = LocateKeyRequest.newBuilder();
+    KeyArgs keyArgs = KeyArgs.newBuilder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName()).build();
+    req.setKeyArgs(keyArgs);
+
+    final LocateKeyResponse resp;
+    try {
+      resp = rpcProxy.deleteKey(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Delete key failed, error:" +
+          resp.getStatus());
+    }
+  }
+
+  /**
+   * Deletes an existing empty bucket from a volume.
+   * @param volume - Name of the volume.
+   * @param bucket - Name of the bucket.
+   * @throws IOException
+   */
+  @Override
+  public void deleteBucket(String volume, String bucket) throws IOException {
+    DeleteBucketRequest.Builder req = DeleteBucketRequest.newBuilder();
+    req.setVolumeName(volume);
+    req.setBucketName(bucket);
+    final DeleteBucketResponse resp;
+    try {
+      resp = rpcProxy.deleteBucket(NULL_RPC_CONTROLLER, req.build());
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+    if (resp.getStatus() != Status.OK) {
+      throw new IOException("Delete Bucket failed, error:"
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * List keys in a bucket.
+   */
+  @Override
+  public List<KsmKeyInfo> listKeys(String volumeName, String bucketName,
+      String startKey, String prefix, int maxKeys) throws IOException {
+    List<KsmKeyInfo> keys = new ArrayList<>();
+    ListKeysRequest.Builder reqBuilder = ListKeysRequest.newBuilder();
+    reqBuilder.setVolumeName(volumeName);
+    reqBuilder.setBucketName(bucketName);
+    reqBuilder.setCount(maxKeys);
+
+    if (startKey != null) {
+      reqBuilder.setStartKey(startKey);
+    }
+
+    if (prefix != null) {
+      reqBuilder.setPrefix(prefix);
+    }
+
+    ListKeysRequest request = reqBuilder.build();
+    final ListKeysResponse resp;
+    try {
+      resp = rpcProxy.listKeys(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() == Status.OK) {
+      keys.addAll(
+          resp.getKeyInfoList().stream()
+              .map(KsmKeyInfo::getFromProtobuf)
+              .collect(Collectors.toList()));
+      return keys;
+    } else {
+      throw new IOException("List Keys failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  @Override
+  public List<ServiceInfo> getServiceList() throws IOException {
+    ServiceListRequest request = ServiceListRequest.newBuilder().build();
+    final ServiceListResponse resp;
+    try {
+      resp = rpcProxy.getServiceList(NULL_RPC_CONTROLLER, request);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+
+    if (resp.getStatus() == Status.OK) {
+      return resp.getServiceInfoList().stream()
+              .map(ServiceInfo::getFromProtobuf)
+              .collect(Collectors.toList());
+    } else {
+      throw new IOException("Getting service list failed, error: "
+          + resp.getStatus());
+    }
+  }
+
+  /**
+   * Return the proxy object underlying this protocol translator.
+   *
+   * @return the proxy object underlying this protocol translator.
+   */
+  @Override
+  public Object getUnderlyingProxyObject() {
+    return rpcProxy;
+  }
+}
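A minimal caller-side sketch of the listBuckets pagination contract implemented
above. The "ksmClient" variable stands in for any client exposing the same
listBuckets signature, and KsmBucketInfo#getBucketName() is assumed here for
illustration only:

    String startKey = null;
    List<KsmBucketInfo> page;
    do {
      page = ksmClient.listBuckets("volume-one", startKey, null, 100);
      for (KsmBucketInfo bucket : page) {
        System.out.println(bucket.getBucketName());
      }
      if (!page.isEmpty()) {
        // The last returned bucket name becomes the start key of the next page.
        startKey = page.get(page.size() - 1).getBucketName();
      }
    } while (page.size() == 100);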
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolPB.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolPB.java
new file mode 100644
index 0000000..8acca8a
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/KeySpaceManagerProtocolPB.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.protocolPB;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.KeySpaceManagerService;
+
+/**
+ * Protocol used to communicate with KSM.
+ */
+@ProtocolInfo(protocolName =
+    "org.apache.hadoop.ozone.protocol.KeySpaceManagerProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.Private
+public interface KeySpaceManagerProtocolPB
+    extends KeySpaceManagerService.BlockingInterface {
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/package-info.java
new file mode 100644
index 0000000..67f9f7b
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/ksm/protocolPB/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm.protocolPB;
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/package-info.java
new file mode 100644
index 0000000..69d94b6
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+/**
+ * Classes related to the Ozone REST interface.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/KSMPBHelper.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/KSMPBHelper.java
new file mode 100644
index 0000000..fdc3ce7
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/KSMPBHelper.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo.OzoneAclType;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo.OzoneAclRights;
+
+/**
+ * Utilities for converting protobuf classes.
+ */
+public final class KSMPBHelper {
+
+  private KSMPBHelper() {
+    // Hidden constructor.
+  }
+
+  /**
+   * Converts OzoneAcl into protobuf's OzoneAclInfo.
+   * @return OzoneAclInfo
+   */
+  public static OzoneAclInfo convertOzoneAcl(OzoneAcl acl) {
+    OzoneAclInfo.OzoneAclType aclType;
+    switch(acl.getType()) {
+    case USER:
+      aclType = OzoneAclType.USER;
+      break;
+    case GROUP:
+      aclType = OzoneAclType.GROUP;
+      break;
+    case WORLD:
+      aclType = OzoneAclType.WORLD;
+      break;
+    default:
+      throw new IllegalArgumentException("ACL type is not recognized");
+    }
+    OzoneAclInfo.OzoneAclRights aclRights;
+    switch(acl.getRights()) {
+    case READ:
+      aclRights = OzoneAclRights.READ;
+      break;
+    case WRITE:
+      aclRights = OzoneAclRights.WRITE;
+      break;
+    case READ_WRITE:
+      aclRights = OzoneAclRights.READ_WRITE;
+      break;
+    default:
+      throw new IllegalArgumentException("ACL right is not recognized");
+    }
+
+    return OzoneAclInfo.newBuilder().setType(aclType)
+        .setName(acl.getName())
+        .setRights(aclRights)
+        .build();
+  }
+
+  /**
+   * Converts protobuf's OzoneAclInfo into OzoneAcl.
+   * @return OzoneAcl
+   */
+  public static OzoneAcl convertOzoneAcl(OzoneAclInfo aclInfo) {
+    OzoneAcl.OzoneACLType aclType;
+    switch(aclInfo.getType()) {
+    case USER:
+      aclType = OzoneAcl.OzoneACLType.USER;
+      break;
+    case GROUP:
+      aclType = OzoneAcl.OzoneACLType.GROUP;
+      break;
+    case WORLD:
+      aclType = OzoneAcl.OzoneACLType.WORLD;
+      break;
+    default:
+      throw new IllegalArgumentException("ACL type is not recognized");
+    }
+    OzoneAcl.OzoneACLRights aclRights;
+    switch(aclInfo.getRights()) {
+    case READ:
+      aclRights = OzoneAcl.OzoneACLRights.READ;
+      break;
+    case WRITE:
+      aclRights = OzoneAcl.OzoneACLRights.WRITE;
+      break;
+    case READ_WRITE:
+      aclRights = OzoneAcl.OzoneACLRights.READ_WRITE;
+      break;
+    default:
+      throw new IllegalArgumentException("ACL right is not recognized");
+    }
+
+    return new OzoneAcl(aclType, aclInfo.getName(), aclRights);
+  }
+}
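A quick round-trip sketch for the two converters above; the "user:bilbo:rw"
ACL string format is an assumption used only for illustration:

    OzoneAcl acl = OzoneAcl.parseAcl("user:bilbo:rw");
    OzoneAclInfo aclProto = KSMPBHelper.convertOzoneAcl(acl);
    OzoneAcl restored = KSMPBHelper.convertOzoneAcl(aclProto);
    // Type, name and rights are expected to survive the round trip.
    assert restored.getName().equals(acl.getName());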
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
new file mode 100644
index 0000000..860386d
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.protocolPB;
+
+/**
+ * This package contains classes for the Protocol Buffers binding of Ozone
+ * protocols.
+ */
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketArgs.java
new file mode 100644
index 0000000..0d5248d
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketArgs.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneConsts;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * BucketArgs packages all bucket related arguments to
+ * file system calls.
+ */
+public class BucketArgs extends VolumeArgs {
+  private final String bucketName;
+  private List<OzoneAcl> addAcls;
+  private List<OzoneAcl> removeAcls;
+  private OzoneConsts.Versioning versioning;
+  private StorageType storageType;
+
+  /**
+   * Constructor for BucketArgs.
+   *
+   * @param volumeName - volumeName
+   * @param bucketName - bucket Name
+   * @param userArgs - userArgs
+   */
+  public BucketArgs(String volumeName, String bucketName, UserArgs userArgs) {
+    super(volumeName, userArgs);
+    this.bucketName = bucketName;
+    this.versioning = OzoneConsts.Versioning.NOT_DEFINED;
+    this.storageType = null;
+  }
+
+
+  /**
+   * Constructor for BucketArgs.
+   *
+   * @param bucketName - bucket Name
+   * @param volumeArgs - volume Args
+   */
+  public BucketArgs(String bucketName, VolumeArgs volumeArgs) {
+    super(volumeArgs);
+    this.bucketName = bucketName;
+    this.versioning = OzoneConsts.Versioning.NOT_DEFINED;
+    this.storageType = null;
+  }
+
+  /**
+   * Constructor for BucketArgs.
+   *
+   * @param args - Bucket Args
+   */
+  public BucketArgs(BucketArgs args) {
+    this(args.getBucketName(), args);
+    this.setAddAcls(args.getAddAcls());
+    this.setRemoveAcls(args.getRemoveAcls());
+  }
+
+  /**
+   * Returns the Bucket Name.
+   *
+   * @return Bucket Name
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Returns Additive ACLs for the Bucket if specified.
+   *
+   * @return acls
+   */
+  public List<OzoneAcl> getAddAcls() {
+    return addAcls;
+  }
+
+  /**
+   * Set Additive ACLs.
+   *
+   * @param acl - ACL
+   */
+  public void setAddAcls(List<OzoneAcl> acl) {
+    this.addAcls = acl;
+  }
+
+  /**
+   * Returns remove ACLs for the Bucket if specified.
+   *
+   * @return acls
+   */
+  public List<OzoneAcl> getRemoveAcls() {
+    return removeAcls;
+  }
+
+  /**
+   * Parses the given ACL strings and adds them to the list of ACLs to add.
+   *
+   * @param aclString - list of ACL strings
+   */
+  public void addAcls(List<String> aclString) throws IllegalArgumentException {
+    if (aclString == null) {
+      throw new IllegalArgumentException("ACLs cannot be null");
+    }
+    if (this.addAcls == null) {
+      this.addAcls = new LinkedList<>();
+    }
+    for (String s : aclString) {
+      this.addAcls.add(OzoneAcl.parseAcl(s));
+    }
+  }
+
+  /**
+   * Parses the given ACL strings and adds them to the list of ACLs to remove.
+   *
+   * @param aclString - list of ACL strings
+   */
+  public void removeAcls(List<String> aclString)
+      throws IllegalArgumentException {
+    if (aclString == null) {
+      throw new IllegalArgumentException("ACLs cannot be null");
+    }
+    if (this.removeAcls == null) {
+      this.removeAcls = new LinkedList<>();
+    }
+    for (String s : aclString) {
+      this.removeAcls.add(OzoneAcl.parseAcl(s));
+    }
+  }
+
+  /**
+   * Set remove ACLs.
+   *
+   * @param acl - ACL
+   */
+  public void setRemoveAcls(List<OzoneAcl> acl) {
+    this.removeAcls = acl;
+  }
+
+
+  /**
+   * Returns Versioning Info.
+   *
+   * @return versioning
+   */
+  public OzoneConsts.Versioning getVersioning() {
+    return versioning;
+  }
+
+
+  /**
+   * SetVersioning Info.
+   *
+   * @param versioning - Enum value
+   */
+  public void setVersioning(OzoneConsts.Versioning versioning) {
+    this.versioning = versioning;
+  }
+
+  /**
+   * Returns the current storage type.
+   *
+   * @return StorageType
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Sets the storage type.
+   *
+   * @param storageType - Storage type to set
+   */
+  public void setStorageType(StorageType storageType) {
+    this.storageType = storageType;
+  }
+
+  /**
+   * Returns Volume/bucketName.
+   *
+   * @return String
+   */
+  @Override
+  public String getResourceName() {
+    return getVolumeName() + "/" + getBucketName();
+  }
+
+  /**
+   * Returns User/Volume name which is the parent of this
+   * bucket.
+   *
+   * @return String
+   */
+  public String getParentName() {
+    return getUserName() + "/" + getVolumeName();
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyArgs.java
new file mode 100644
index 0000000..48a4cb4
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyArgs.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+/**
+ * Class that packages all key Arguments.
+ */
+public class KeyArgs extends BucketArgs {
+  private String key;
+  private String hash;
+  private long size;
+
+  /**
+   * Constructor for Key Args.
+   *
+   * @param volumeName - Volume Name
+   * @param bucketName - Bucket Name
+   * @param objectName - Key
+   */
+  public KeyArgs(String volumeName, String bucketName,
+                 String objectName, UserArgs args) {
+    super(volumeName, bucketName, args);
+    this.key = objectName;
+  }
+
+  /**
+   * Constructor for Key Args.
+   *
+   * @param objectName - Key
+   * @param args - Bucket Args
+   */
+  public KeyArgs(String objectName, BucketArgs args) {
+    super(args);
+    this.key = objectName;
+  }
+
+  /**
+   * Get Key Name.
+   *
+   * @return String
+   */
+  public String getKeyName() {
+    return this.key;
+  }
+
+  /**
+   * Computed File hash.
+   *
+   * @return String
+   */
+  public String getHash() {
+    return hash;
+  }
+
+  /**
+   * Sets the hash String.
+   *
+   * @param hash String
+   */
+  public void setHash(String hash) {
+    this.hash = hash;
+  }
+
+  /**
+   * Returns the file size.
+   *
+   * @return long - file size
+   */
+  public long getSize() {
+    return size;
+  }
+
+  /**
+   * Set Size.
+   *
+   * @param size Size of the file
+   */
+  public void setSize(long size) {
+    this.size = size;
+  }
+
+  /**
+   * Returns the name of the resource.
+   *
+   * @return String
+   */
+  @Override
+  public String getResourceName() {
+    return super.getResourceName() + "/" + getKeyName();
+  }
+
+  /**
+   * Parent name of this resource.
+   *
+   * @return String.
+   */
+  @Override
+  public String getParentName() {
+    return super.getResourceName();
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/ListArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/ListArgs.java
new file mode 100644
index 0000000..49ca4a4
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/ListArgs.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.handlers;
+
+/**
+ * Supports listing keys with pagination.
+ */
+public class ListArgs<T extends UserArgs> {
+  private String prevKey;
+  private String prefix;
+  private int maxKeys;
+  private boolean rootScan;
+  private T args;
+
+  /**
+   * Constructor for ListArgs.
+   *
+   * @param args      - BucketArgs
+   * @param prefix    Prefix to start Query from
+   * @param maxKeys   Max result set
+   * @param prevKey - Page token
+   */
+  public ListArgs(T args, String prefix, int maxKeys,
+                  String prevKey) {
+    setArgs(args);
+    setPrefix(prefix);
+    setMaxKeys(maxKeys);
+    setPrevKey(prevKey);
+  }
+
+  /**
+   * Copy Constructor for ListArgs.
+   *
+   * @param args - Args of type T
+   * @param listArgs - ListArgs to copy from
+   */
+  public ListArgs(T args, ListArgs<?> listArgs) {
+    this(args, listArgs.getPrefix(), listArgs.getMaxKeys(),
+        listArgs.getPrevKey());
+  }
+
+  /**
+   * Returns page token.
+   *
+   * @return String
+   */
+  public String getPrevKey() {
+    return prevKey;
+  }
+
+  /**
+   * Sets page token.
+   *
+   * @param prevKey - Page token
+   */
+  public void setPrevKey(String prevKey) {
+    this.prevKey = prevKey;
+  }
+
+  /**
+   * Gets max keys.
+   *
+   * @return int
+   */
+  public int getMaxKeys() {
+    return maxKeys;
+  }
+
+  /**
+   * Sets max keys.
+   *
+   * @param maxKeys - Maximum keys to return
+   */
+  public void setMaxKeys(int maxKeys) {
+    this.maxKeys = maxKeys;
+  }
+
+  /**
+   * Gets prefix.
+   *
+   * @return String
+   */
+  public String getPrefix() {
+    return prefix;
+  }
+
+  /**
+   * Sets prefix.
+   *
+   * @param prefix - The prefix that we are looking for
+   */
+  public void setPrefix(String prefix) {
+    this.prefix = prefix;
+  }
+
+  /**
+   * Gets args.
+   * @return  T
+   */
+  public T getArgs() {
+    return args;
+  }
+
+  /**
+   * Sets  args.
+   * @param args T
+   */
+  public void setArgs(T args) {
+    this.args = args;
+  }
+
+  /**
+   * Checks if we are doing a rootScan.
+   * @return - RootScan.
+   */
+  public boolean isRootScan() {
+    return rootScan;
+  }
+
+  /**
+   * Sets the RootScan property.
+   * @param rootScan - Boolean.
+   */
+  public void setRootScan(boolean rootScan) {
+    this.rootScan = rootScan;
+  }
+
+}
\ No newline at end of file
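A small usage sketch for ListArgs; the bucketArgs value is assumed to have been
built as shown elsewhere in this patch:

    // Request up to 100 keys matching prefix "key-", starting after "key-042".
    ListArgs<BucketArgs> listArgs =
        new ListArgs<>(bucketArgs, "key-", 100, "key-042");
    listArgs.setRootScan(false);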
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/UserArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/UserArgs.java
new file mode 100644
index 0000000..07856d0
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/UserArgs.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * UserArgs is used to package caller info
+ * and pass it down to the file system.
+ */
+@InterfaceAudience.Private
+public class UserArgs {
+  private String userName;
+  private final String requestID;
+  private final String hostName;
+  private final UriInfo uri;
+  private final Request request;
+  private final HttpHeaders headers;
+  private String[] groups;
+
+
+  /**
+   * Constructs  user args.
+   *
+   * @param userName - User name
+   * @param requestID - Request ID
+   * @param hostName - Host Name
+   * @param req - Request
+   * @param info - Uri Info
+   * @param httpHeaders - http headers
+   */
+  public UserArgs(String userName, String requestID, String hostName,
+                  Request req, UriInfo info, HttpHeaders httpHeaders) {
+    this.hostName = hostName;
+    this.userName = userName;
+    this.requestID = requestID;
+    this.uri = info;
+    this.request = req;
+    this.headers = httpHeaders;
+  }
+
+  /**
+   * Constructs  user args when we don't know the user name yet.
+   *
+   * @param requestID - Request ID
+   * @param hostName - Host Name
+   * @param req - Request
+   * @param info - UriInfo
+   * @param httpHeaders - http headers
+   */
+  public UserArgs(String requestID, String hostName, Request req, UriInfo info,
+                  HttpHeaders httpHeaders) {
+    this.hostName = hostName;
+    this.requestID = requestID;
+    this.uri = info;
+    this.request = req;
+    this.headers = httpHeaders;
+  }
+
+  /**
+   * Returns hostname.
+   *
+   * @return String
+   */
+  public String getHostName() {
+    return hostName;
+  }
+
+  /**
+   * Returns RequestID.
+   *
+   * @return String
+   */
+  public String getRequestID() {
+    return requestID;
+  }
+
+  /**
+   * Returns User Name.
+   *
+   * @return String
+   */
+  public String getUserName() {
+    return userName;
+  }
+
+  /**
+   * Sets the user name.
+   *
+   * @param userName Name of the user
+   */
+  public void setUserName(String userName) {
+    this.userName = userName;
+  }
+
+  /**
+   * Returns list of groups.
+   *
+   * @return String[]
+   */
+  public String[] getGroups() {
+    return this.groups;
+  }
+
+  /**
+   * Sets the group list.
+   *
+   * @param groups list of groups
+   */
+  public void setGroups(String[] groups) {
+    this.groups = groups;
+  }
+
+  /**
+   * Returns the resource Name.
+   *
+   * @return String Resource.
+   */
+  public String getResourceName() {
+    return getUserName();
+  }
+
+  /**
+   * Returns Http Headers for this call.
+   *
+   * @return httpHeaders
+   */
+  public HttpHeaders getHeaders() {
+    return headers;
+  }
+
+  /**
+   * Returns Request Object.
+   *
+   * @return Request
+   */
+  public Request getRequest() {
+    return request;
+  }
+
+  /**
+   * Returns UriInfo.
+   *
+   * @return UriInfo
+   */
+  public UriInfo getUri() {
+    return uri;
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeArgs.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeArgs.java
new file mode 100644
index 0000000..1d67c67
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeArgs.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * VolumeArgs is used to package all volume
+ * related arguments in the call to the underlying
+ * file system.
+ */
+@InterfaceAudience.Private
+public class VolumeArgs extends UserArgs {
+  private String adminName;
+  private final String volumeName;
+  private OzoneQuota quota;
+
+  /**
+   * Returns Quota Information.
+   *
+   * @return Quota
+   */
+  public OzoneQuota getQuota() {
+    return quota;
+  }
+
+  /**
+   * Returns volume name.
+   *
+   * @return String
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Constructs  volume Args.
+   *
+   * @param userName - User name
+   * @param volumeName - volume Name
+   * @param requestID - Request ID
+   * @param hostName - Host Name
+   * @param request  - Http Request
+   * @param info - URI info
+   * @param headers - http headers
+   * @param groups - list of groups allowed to access the volume
+   */
+  public VolumeArgs(String userName, String volumeName, String requestID,
+                    String hostName, Request request, UriInfo info,
+                    HttpHeaders headers, String[] groups) {
+    super(userName, requestID, hostName, request, info, headers);
+    super.setGroups(groups);
+    this.volumeName = volumeName;
+  }
+
+  /**
+   * Constructs  volume Args.
+   *
+   * @param volumeName - volume Name
+   * @param userArgs - userArgs
+   */
+  public VolumeArgs(String volumeName, UserArgs userArgs) {
+    this(userArgs.getUserName(), volumeName, userArgs.getRequestID(),
+         userArgs.getHostName(), userArgs.getRequest(), userArgs.getUri(),
+         userArgs.getHeaders(), userArgs.getGroups());
+  }
+
+  /**
+   * Creates VolumeArgs from another VolumeArgs.
+   */
+  public VolumeArgs(VolumeArgs volArgs) {
+    this(volArgs.getVolumeName(), volArgs);
+  }
+
+  /**
+   * Sets Quota information.
+   *
+   * @param quota - Quota String
+   * @throws IllegalArgumentException
+   */
+  public void setQuota(String quota) throws IllegalArgumentException {
+    this.quota = OzoneQuota.parseQuota(quota);
+  }
+
+  /**
+   * Sets quota information.
+   *
+   * @param quota - OzoneQuota
+   */
+  public void setQuota(OzoneQuota quota) {
+    this.quota = quota;
+  }
+
+  /**
+   * Gets admin Name.
+   *
+   * @return - Admin Name
+   */
+  public String getAdminName() {
+    return adminName;
+  }
+
+  /**
+   * Sets Admin Name.
+   *
+   * @param adminName - Admin Name
+   */
+  public void setAdminName(String adminName) {
+    this.adminName = adminName;
+  }
+
+  /**
+   * Returns UserName/VolumeName.
+   *
+   * @return String
+   */
+  @Override
+  public String getResourceName() {
+    return super.getResourceName() + "/" + getVolumeName();
+  }
+}
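A sketch of composing the handler argument chain defined in this patch
(UserArgs -> VolumeArgs -> BucketArgs). The JAX-RS Request/UriInfo/HttpHeaders
values are passed as null purely for illustration, and the ACL string format is
assumed:

    UserArgs user = new UserArgs("bilbo", "request-1", "localhost", null, null, null);
    VolumeArgs volume = new VolumeArgs("volume-one", user);
    BucketArgs bucket = new BucketArgs("bucket-one", volume);
    bucket.addAcls(java.util.Collections.singletonList("user:bilbo:rw"));
    System.out.println(bucket.getResourceName());  // volume-one/bucket-one
    System.out.println(bucket.getParentName());    // bilbo/volume-one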
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java
new file mode 100644
index 0000000..a66a773c
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.handlers;
+
+/**
+ * REST handler value classes.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/OzoneQuota.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/OzoneQuota.java
new file mode 100644
index 0000000..9619ebd
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/OzoneQuota.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.request;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/**
+ * Represents an OzoneQuota object that can be applied to
+ * a storage volume.
+ */
+@InterfaceAudience.Private
+public class OzoneQuota {
+
+  private Units unit;
+  private int size;
+
+  /** Quota Units.*/
+  public enum Units {UNDEFINED, BYTES, MB, GB, TB}
+
+  /**
+   * Returns size.
+   *
+   * @return int
+   */
+  public int getSize() {
+    return size;
+  }
+
+  /**
+   * Returns Units.
+   *
+   * @return Unit in MB, GB or TB
+   */
+  public Units getUnit() {
+    return unit;
+  }
+
+  /**
+   * Constructs a default Quota object.
+   */
+  public OzoneQuota() {
+    this.size = 0;
+    this.unit = Units.UNDEFINED;
+  }
+
+  /**
+   * Constructor for Ozone Quota.
+   *
+   * @param size - Integer Size
+   * @param unit MB, GB  or TB
+   */
+  public OzoneQuota(int size, Units unit) {
+    this.size = size;
+    this.unit = unit;
+  }
+
+  /**
+   * Formats a quota as a string.
+   *
+   * @param quota the quota to format
+   * @return string representation of quota
+   */
+  public static String formatQuota(OzoneQuota quota) {
+    return String.valueOf(quota.size) + quota.unit;
+  }
+
+  /**
+   * Parses a user provided string and returns the
+   * Quota Object.
+   *
+   * @param quotaString Quota String
+   *
+   * @return OzoneQuota object
+   *
+   * @throws IllegalArgumentException
+   */
+  public static OzoneQuota parseQuota(String quotaString)
+      throws IllegalArgumentException {
+
+    if ((quotaString == null) || (quotaString.isEmpty())) {
+      throw new IllegalArgumentException(
+          "Quota string cannot be null or empty.");
+    }
+
+    if (isRemove(quotaString)) {
+      throw new IllegalArgumentException("Remove is invalid in this context.");
+    }
+
+    String uppercase = quotaString.toUpperCase().replaceAll("\\s+", "");
+    String size = "";
+    int nSize;
+    Units currUnit = Units.MB;
+    boolean found = false;
+    if (uppercase.endsWith(Header.OZONE_QUOTA_MB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - Header.OZONE_QUOTA_MB.length());
+      currUnit = Units.MB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(Header.OZONE_QUOTA_GB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - Header.OZONE_QUOTA_GB.length());
+      currUnit = Units.GB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(Header.OZONE_QUOTA_TB)) {
+      size = uppercase
+          .substring(0, uppercase.length() - Header.OZONE_QUOTA_TB.length());
+      currUnit = Units.TB;
+      found = true;
+    }
+
+    if (uppercase.endsWith(Header.OZONE_QUOTA_BYTES)) {
+      size = uppercase
+          .substring(0, uppercase.length() - Header.OZONE_QUOTA_BYTES.length());
+      currUnit = Units.BYTES;
+      found = true;
+    }
+
+    if (!found) {
+      throw new IllegalArgumentException(
+          "Quota unit not recognized. Supported values are BYTES, MB, GB and " +
+              "TB.");
+    }
+
+    nSize = Integer.parseInt(size);
+    if (nSize < 0) {
+      throw new IllegalArgumentException("Quota cannot be negative.");
+    }
+
+    return new OzoneQuota(nSize, currUnit);
+  }
+
+
+  /**
+   * Checks if the quota string is just a remove operation.
+   *
+   * @param quotaString User provided quota String
+   *
+   * @return True if it is Remove, false otherwise
+   */
+  public static boolean isRemove(String quotaString) {
+
+    return (quotaString != null) &&
+        (quotaString.compareToIgnoreCase(Header.OZONE_QUOTA_REMOVE) == 0);
+  }
+
+  /**
+   * Returns size in Bytes or -1 if there is no Quota.
+   */
+  @JsonIgnore
+  public long sizeInBytes() {
+    switch (this.unit) {
+    case BYTES:
+      return this.getSize();
+    case MB:
+      return this.getSize() * OzoneConsts.MB;
+    case GB:
+      return this.getSize() * OzoneConsts.GB;
+    case TB:
+      return this.getSize() * OzoneConsts.TB;
+    case UNDEFINED:
+    default:
+      return -1;
+    }
+  }
+
+  /**
+   * Returns OzoneQuota corresponding to size in bytes.
+   *
+   * @param sizeInBytes size in bytes to be converted
+   *
+   * @return OzoneQuota object
+   */
+  public static OzoneQuota getOzoneQuota(long sizeInBytes) {
+    long size;
+    Units unit;
+    if (sizeInBytes % OzoneConsts.TB == 0) {
+      size = sizeInBytes / OzoneConsts.TB;
+      unit = Units.TB;
+    } else if (sizeInBytes % OzoneConsts.GB == 0) {
+      size = sizeInBytes / OzoneConsts.GB;
+      unit = Units.GB;
+    } else if (sizeInBytes % OzoneConsts.MB == 0) {
+      size = sizeInBytes / OzoneConsts.MB;
+      unit = Units.MB;
+    } else {
+      size = sizeInBytes;
+      unit = Units.BYTES;
+    }
+    return new OzoneQuota((int)size, unit);
+  }
+}
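A sketch of the quota round trip provided by OzoneQuota, assuming
Header.OZONE_QUOTA_GB is the literal "GB" suffix checked in parseQuota:

    OzoneQuota quota = OzoneQuota.parseQuota("10GB");
    long bytes = quota.sizeInBytes();                        // 10 * OzoneConsts.GB
    OzoneQuota roundTrip = OzoneQuota.getOzoneQuota(bytes);  // back to 10 GB
    System.out.println(OzoneQuota.formatQuota(roundTrip));   // prints "10GB"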
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/package-info.java
new file mode 100644
index 0000000..4fbc18f
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/request/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Request value classes for the Ozone REST interface.
+ */
+package org.apache.hadoop.ozone.web.request;
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/BucketInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/BucketInfo.java
new file mode 100644
index 0000000..e66cd20
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/BucketInfo.java
@@ -0,0 +1,325 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.response;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+import com.google.common.base.Preconditions;
+
+/**
+ * BucketInfo class; used as a response class to send JSON
+ * information about a bucket back to the client.
+ */
+public class BucketInfo implements Comparable<BucketInfo> {
+  static final String BUCKET_INFO = "BUCKET_INFO_FILTER";
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(BucketInfo.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"bytesUsed", "keyCount"};
+
+    FilterProvider filters = new SimpleFilterProvider().addFilter(BUCKET_INFO,
+        SimpleBeanPropertyFilter.serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  private String volumeName;
+  private String bucketName;
+  private String createdOn;
+  private List<OzoneAcl> acls;
+  private OzoneConsts.Versioning versioning;
+  private StorageType storageType;
+  private long bytesUsed;
+  private long keyCount;
+
+  /**
+   * Constructor for BucketInfo.
+   *
+   * @param volumeName - Name of the volume
+   * @param bucketName - Name of the bucket
+   */
+  public BucketInfo(String volumeName, String bucketName) {
+    this.volumeName = volumeName;
+    this.bucketName = bucketName;
+  }
+
+
+  /**
+   * Default constructor for BucketInfo.
+   */
+  public BucketInfo() {
+    acls = new LinkedList<OzoneAcl>();
+  }
+
+  /**
+   * Parse a JSON string into BucketInfo Object.
+   *
+   * @param jsonString - Json String
+   *
+   * @return - BucketInfo
+   *
+   * @throws IOException
+   */
+  public static BucketInfo parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+  /**
+   * Returns a List of ACL on the Bucket.
+   *
+   * @return List of Acls
+   */
+  public List<OzoneAcl> getAcls() {
+    return acls;
+  }
+
+  /**
+   * Sets ACls.
+   *
+   * @param acls - Acls list
+   */
+  public void setAcls(List<OzoneAcl> acls) {
+    this.acls = acls;
+  }
+
+  /**
+   * Returns Storage Type info.
+   *
+   * @return Storage Type of the bucket
+   */
+  public StorageType getStorageType() {
+    return storageType;
+  }
+
+  /**
+   * Sets the Storage Type.
+   *
+   * @param storageType - Storage Type
+   */
+  public void setStorageType(StorageType storageType) {
+    this.storageType = storageType;
+  }
+
+  /**
+   * Returns versioning.
+   *
+   * @return versioning Enum
+   */
+  public OzoneConsts.Versioning getVersioning() {
+    return versioning;
+  }
+
+  /**
+   * Sets Versioning.
+   *
+   * @param versioning - Versioning value to set
+   */
+  public void setVersioning(OzoneConsts.Versioning versioning) {
+    this.versioning = versioning;
+  }
+
+
+  /**
+   * Gets bucket Name.
+   *
+   * @return String
+   */
+  public String getBucketName() {
+    return bucketName;
+  }
+
+  /**
+   * Sets bucket Name.
+   *
+   * @param bucketName - Name of the bucket
+   */
+  public void setBucketName(String bucketName) {
+    this.bucketName = bucketName;
+  }
+
+  /**
+   * Sets creation time of the bucket.
+   *
+   * @param creationTime - Date String
+   */
+  public void setCreatedOn(String creationTime) {
+    this.createdOn = creationTime;
+  }
+
+  /**
+   * Returns creation time.
+   *
+   * @return creation time of bucket.
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * Returns a JSON string of this object, after stripping out the
+   * bytesUsed and keyCount fields.
+   *
+   * @return String
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * Returns the Object as a Json String.
+   *
+   * Both toJsonString and toDBString exist because toJsonString supports an
+   * external facing contract with REST clients, while the server internally
+   * may want to add more fields to this class. The distinction helps in
+   * serializing all fields vs. only the fields that are part of the REST
+   * protocol.
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+
+  /**
+   * Returns Volume Name.
+   *
+   * @return String volume name
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Sets the Volume Name of the bucket.
+   *
+   * @param volumeName - volumeName
+   */
+  public void setVolumeName(String volumeName) {
+    this.volumeName = volumeName;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * Please note : BucketInfo compare functions are used only within the
+   * context of a volume, hence volume name is purposefully ignored in
+   * compareTo, equal and hashcode functions of this class.
+   */
+  @Override
+  public int compareTo(BucketInfo o) {
+    Preconditions.checkState(o.getVolumeName().equals(this.getVolumeName()));
+    return this.bucketName.compareTo(o.getBucketName());
+  }
+
+  /**
+   * Checks if two bucketInfo's are equal.
+   * @param o Object BucketInfo
+   * @return  True or False
+   */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof BucketInfo)) {
+      return false;
+    }
+
+    BucketInfo that = (BucketInfo) o;
+    Preconditions.checkState(that.getVolumeName().equals(this.getVolumeName()));
+    return bucketName.equals(that.bucketName);
+
+  }
+
+  /**
+   * Hash Code for this object.
+   * @return int
+   */
+  @Override
+  public int hashCode() {
+    return bucketName.hashCode();
+  }
+
+  /**
+   * Get the number of bytes used by this bucket.
+   *
+   * @return long
+   */
+  public long getBytesUsed() {
+    return bytesUsed;
+  }
+
+  /**
+   * Set bytes Used.
+   *
+   * @param bytesUsed - bytesUsed
+   */
+  public void setBytesUsed(long bytesUsed) {
+    this.bytesUsed = bytesUsed;
+  }
+
+  /**
+   * Get Key Count inside this bucket.
+   *
+   * @return - KeyCount
+   */
+  public long getKeyCount() {
+    return keyCount;
+  }
+
+  /**
+   * Set Key Count inside this bucket.
+   *
+   * @param keyCount - Sets the Key Count
+   */
+  public void setKeyCount(long keyCount) {
+    this.keyCount = keyCount;
+  }
+
+  /**
+   * This class allows us to create custom filters
+   * for the Json serialization.
+   */
+  @JsonFilter(BUCKET_INFO)
+  class MixIn {
+
+  }
+
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/KeyInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/KeyInfo.java
new file mode 100644
index 0000000..34885f6
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/KeyInfo.java
@@ -0,0 +1,290 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.response;
+
+import java.io.IOException;
+
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+
+/**
+ * Represents an Ozone key Object.
+ */
+public class KeyInfo implements Comparable<KeyInfo> {
+  static final String OBJECT_INFO = "OBJECT_INFO_FILTER";
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(KeyInfo.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"dataFileName"};
+
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(OBJECT_INFO, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  /**
+   * This class allows us to create custom filters
+   * for the Json serialization.
+   */
+  @JsonFilter(OBJECT_INFO)
+  class MixIn {
+
+  }
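+
+  // Illustrative sketch (not a normative contract): because of the OBJECT_INFO
+  // filter configured above, toJsonString() omits dataFileName from the REST
+  // response, while toDBString() is expected to keep it for server-side use.
+  //
+  //   KeyInfo key = new KeyInfo();
+  //   key.setKeyName("photo.jpg");                        // example values
+  //   key.setDataFileName("/data/vol/bucket/photo.jpg");  // hypothetical path
+  //   key.toJsonString();  // no "dataFileName" field in the output
+  //   key.toDBString();    // includes "dataFileName"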
+  private long version;
+  private String md5hash;
+  private String createdOn;
+  private String modifiedOn;
+  private long size;
+  private String keyName;
+
+  private String dataFileName;
+
+  /**
+   * When this key was created.
+   *
+   * @return Date String
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * When this key was modified.
+   *
+   * @return Date String
+   */
+  public String getModifiedOn() {
+    return modifiedOn;
+  }
+
+  /**
+   * When this key was created.
+   *
+   * @param createdOn - Date String
+   */
+  public void setCreatedOn(String createdOn) {
+    this.createdOn = createdOn;
+  }
+
+  /**
+   * When this key was modified.
+   *
+   * @param modifiedOn - Date String
+   */
+  public void setModifiedOn(String modifiedOn) {
+    this.modifiedOn = modifiedOn;
+  }
+
+  /**
+   * Full path to where the actual data for this key is stored.
+   *
+   * @return String
+   */
+  public String getDataFileName() {
+    return dataFileName;
+  }
+
+  /**
+   * Sets the path where the data for this key is stored.
+   *
+   * @param dataFileName - Data File Name
+   */
+  public void setDataFileName(String dataFileName) {
+    this.dataFileName = dataFileName;
+  }
+
+  /**
+   * Gets the key name of this object.
+   *
+   * @return String
+   */
+  public String getKeyName() {
+    return keyName;
+  }
+
+  /**
+   * Sets the Key name of this object.
+   *
+   * @param keyName - String
+   */
+  public void setKeyName(String keyName) {
+    this.keyName = keyName;
+  }
+
+  /**
+   * Returns the MD5 Hash for the data of this key.
+   *
+   * @return String MD5
+   */
+  public String getMd5hash() {
+    return md5hash;
+  }
+
+  /**
+   * Sets the MD5 of this file.
+   *
+   * @param md5hash - Md5 of this file
+   */
+  public void setMd5hash(String md5hash) {
+    this.md5hash = md5hash;
+  }
+
+  /**
+   * Number of bytes stored in the data part of this key.
+   *
+   * @return long size of the data file
+   */
+  public long getSize() {
+    return size;
+  }
+
+  /**
+   * Sets the size of the Data part of this key.
+   *
+   * @param size - Size in long
+   */
+  public void setSize(long size) {
+    this.size = size;
+  }
+
+  /**
+   * Version of this key.
+   *
+   * @return - returns the version of this key.
+   */
+  public long getVersion() {
+    return version;
+  }
+
+  /**
+   * Sets the version of this key.
+   *
+   * @param version - version of this key
+   */
+  public void setVersion(long version) {
+    this.version = version;
+  }
+
+  /**
+   * Compares this object with the specified object for order.  Returns a
+   * negative integer, zero, or a positive integer as this object is less
+   * than, equal to, or greater than the specified object.
+   *
+   * @param o the object to be compared.
+   *
+   * @return a negative integer, zero, or a positive integer as this object
+   * is less than, equal to, or greater than the specified object.
+   *
+   * @throws NullPointerException if the specified object is null
+   * @throws ClassCastException if the specified object's type prevents it
+   * from being compared to this object.
+   */
+  @Override
+  public int compareTo(KeyInfo o) {
+    if (this.keyName.compareTo(o.getKeyName()) != 0) {
+      return this.keyName.compareTo(o.getKeyName());
+    }
+
+    if (this.getVersion() == o.getVersion()) {
+      return 0;
+    }
+    if (this.getVersion() < o.getVersion()) {
+      return -1;
+    }
+    return 1;
+  }
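+
+  // For example (a sketch of the ordering defined above): keys sort first by
+  // name and then by version, so ("a.txt", v1) < ("a.txt", v2) < ("b.txt", v1).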
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    KeyInfo keyInfo = (KeyInfo) o;
+
+    return new EqualsBuilder()
+        .append(version, keyInfo.version)
+        .append(keyName, keyInfo.keyName)
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(17, 37)
+        .append(version)
+        .append(keyName)
+        .toHashCode();
+  }
+
+  /**
+   * Parses a string to return a KeyInfo object.
+   *
+   * @param jsonString - Json String
+   *
+   * @return - KeyInfo
+   *
+   * @throws IOException
+   */
+  public static KeyInfo parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+
+  /**
+   * Returns a JSON string of this object, after stripping out
+   * the dataFileName field.
+   *
+   * @return String
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * Returns the Object as a Json String.
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListBuckets.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListBuckets.java
new file mode 100644
index 0000000..bc4e65b
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListBuckets.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.response;
+
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+
+/**
+ * ListBuckets is the response for the ListBuckets query.
+ */
+public class ListBuckets {
+  static final String BUCKET_LIST = "BUCKET_LIST_FILTER";
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(ListBuckets.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"dataFileName"};
+
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(BUCKET_LIST, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  private List<BucketInfo> buckets;
+
+  /**
+   * Constructor for ListBuckets.
+   * @param buckets - List of buckets owned by this user
+   */
+  public ListBuckets(List<BucketInfo> buckets) {
+    this.buckets = buckets;
+
+  }
+
+  /**
+   * Constructor for ListBuckets.
+  */
+  public ListBuckets() {
+    this.buckets = new LinkedList<BucketInfo>();
+  }
+
+  /**
+   * Parses a String to return ListBuckets object.
+   *
+   * @param data - Json String
+   *
+   * @return - ListBuckets
+   *
+   * @throws IOException
+   */
+  public static ListBuckets parse(String data) throws IOException {
+    return READER.readValue(data);
+  }
+
+  /**
+   * Returns a list of Buckets.
+   *
+   * @return Bucket list
+   */
+  public List<BucketInfo> getBuckets() {
+    return buckets;
+  }
+
+  /**
+   * Sets the list of buckets owned by this user.
+   *
+   * @param buckets - List of Buckets
+   */
+  public void setBuckets(List<BucketInfo> buckets) {
+    this.buckets = buckets;
+  }
+
+
+  /**
+   * Returns a JSON string of this object, after stripping out
+   * bytesUsed and keyCount.
+   *
+   * @return String
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * Returns the Object as a Json String.
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+
+  /**
+   * Sorts the buckets based on bucketName.
+   * This is useful when we return the list of buckets
+   */
+  public void sort() {
+    Collections.sort(buckets);
+  }
+
+  /**
+   * Add a new bucket to the list of buckets.
+   * @param bucketInfo - bucket Info
+   */
+  public void addBucket(BucketInfo bucketInfo){
+    this.buckets.add(bucketInfo);
+  }
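+
+  // Typical construction sketch (assuming the handler already has BucketInfo
+  // objects at hand): build the response, sort it, and serialize it.
+  //
+  //   ListBuckets response = new ListBuckets();
+  //   response.addBucket(new BucketInfo("volumeName", "bucketOne"));
+  //   response.addBucket(new BucketInfo("volumeName", "bucketTwo"));
+  //   response.sort();
+  //   String json = response.toJsonString();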
+
+  /**
+   * This class allows us to create custom filters
+   * for the Json serialization.
+   */
+  @JsonFilter(BUCKET_LIST)
+  class MixIn {
+
+  }
+
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListKeys.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListKeys.java
new file mode 100644
index 0000000..9dc77d2
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListKeys.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.response;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+import com.google.common.base.Preconditions;
+
+/**
+ * This class represents the list of keys (objects) in a bucket.
+ */
+public class ListKeys {
+  static final String OBJECT_LIST = "OBJECT_LIST_FILTER";
+
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(ListKeys.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"dataFileName"};
+
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(OBJECT_LIST, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  private String name;
+  private String prefix;
+  private long maxKeys;
+  private boolean truncated;
+  private List<KeyInfo> keyList;
+
+  /**
+   * Default constructor needed for json serialization.
+   */
+  public ListKeys() {
+    this.keyList = new LinkedList<>();
+  }
+
+  /**
+   * Constructor for ListKeys.
+   *
+   * @param args      ListArgs
+   * @param truncated is truncated
+   */
+  public ListKeys(ListArgs args, boolean truncated) {
+    Preconditions.checkState(args.getArgs() instanceof  BucketArgs);
+    this.name = ((BucketArgs) args.getArgs()).getBucketName();
+    this.prefix = args.getPrefix();
+    this.maxKeys = args.getMaxKeys();
+    this.truncated = truncated;
+  }
+
+  /**
+   * Converts a Json string to POJO.
+   * @param jsonString - json string.
+   * @return ListKeys
+   * @throws IOException - Json conversion error.
+   */
+  public static ListKeys parse(String jsonString) throws IOException {
+    return READER.readValue(jsonString);
+  }
+
+  /**
+   * Returns a list of Objects.
+   *
+   * @return List of KeyInfo Objects.
+   */
+  public List<KeyInfo> getKeyList() {
+    return keyList;
+  }
+
+  /**
+   * Sets the list of Objects.
+   *
+   * @param objectList - List of Keys
+   */
+  public void setKeyList(List<KeyInfo> objectList) {
+    this.keyList = objectList;
+  }
+
+  /**
+   * Gets the Max Key Count.
+   *
+   * @return long
+   */
+  public long getMaxKeys() {
+    return maxKeys;
+  }
+
+  /**
+   * Gets bucket Name.
+   *
+   * @return String
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Gets Prefix.
+   *
+   * @return String
+   */
+  public String getPrefix() {
+    return prefix;
+  }
+
+  /**
+   * Gets truncated Status.
+   *
+   * @return Boolean
+   */
+  public boolean isTruncated() {
+    return truncated;
+  }
+
+  /**
+   * Sets the value of truncated.
+   *
+   * @param value - Boolean
+   */
+  public void setTruncated(boolean value) {
+    this.truncated = value;
+  }
+
+  /**
+   * Returns a JSON string of this object, after stripping out the
+   * dataFileName field of each key.
+   *
+   * @return String
+   * @throws  IOException - On json Errors.
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * Returns the Object as a Json String.
+   *
+   * @return String
+   * @throws IOException - on json errors.
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+
+  /**
+   * Sorts the keys based on name and version. This is useful when we return the
+   * list of keys.
+   */
+  public void sort() {
+    Collections.sort(keyList);
+  }
+
+  /**
+   * Add a new key to the list of keys.
+   * @param keyInfo - key Info
+   */
+  public void addKey(KeyInfo keyInfo){
+    this.keyList.add(keyInfo);
+  }
+
+  /**
+   * This class allows us to create custom filters for the Json serialization.
+   */
+  @JsonFilter(OBJECT_LIST)
+  class MixIn {
+
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListVolumes.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListVolumes.java
new file mode 100644
index 0000000..b918349
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/ListVolumes.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.response;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+
+/**
+ * List Volume Class is the class that is returned in JSON format to
+ * users when they call ListVolumes.
+ */
+@InterfaceAudience.Private
+public class ListVolumes {
+  private List<VolumeInfo> volumes;
+
+  static final String VOLUME_LIST = "VOLUME_LIST_FILTER";
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(ListVolumes.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
+
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(VOLUME_LIST, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  /**
+   * Used for json filtering.
+   */
+  @JsonFilter(VOLUME_LIST)
+  class MixIn {
+  }
+
+  /**
+   * Constructs ListVolume objects.
+   */
+  public ListVolumes() {
+    this.volumes = new LinkedList<VolumeInfo>();
+  }
+
+  /**
+   * Gets the list of volumes.
+   *
+   * @return List of VolumeInfo Objects
+   */
+  public List<VolumeInfo> getVolumes() {
+    return volumes;
+  }
+
+
+  /**
+   * Sets volume info.
+   *
+   * @param volumes - List of Volumes
+   */
+  public void setVolumes(List<VolumeInfo> volumes) {
+    this.volumes = volumes;
+  }
+
+  /**
+   * Returns a JSON string of this object, after stripping out
+   * bytesUsed and bucketCount.
+   *
+   * @return String
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * When we serialize a ListVolumes object to our database
+   * we use all fields. However, toJsonString strips out
+   * bytesUsed and bucketCount from the volume info.
+   *
+   * @return Json String
+   *
+   * @throws IOException
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+
+  /**
+   * Parses a String to return ListVolumes object.
+   *
+   * @param data - Json String
+   *
+   * @return - ListVolumes
+   *
+   * @throws IOException
+   */
+  public static ListVolumes parse(String data) throws IOException {
+    return READER.readValue(data);
+  }
+
+  /**
+   * Adds a new volume info to the List.
+   *
+   * @param info - VolumeInfo
+   */
+  public void addVolume(VolumeInfo info) {
+    this.volumes.add(info);
+  }
+
+  /**
+   * Sorts the volumes based on volume name.
+   * This is useful when we return the list of volumes.
+   */
+  public void sort() {
+    Collections.sort(volumes);
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeInfo.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeInfo.java
new file mode 100644
index 0000000..112b27e
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeInfo.java
@@ -0,0 +1,308 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.response;
+
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonFilter;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
+import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+
+/**
+ * VolumeInfo is the Java class that represents the JSON
+ * returned when a VolumeInfo call is made.
+ */
+@InterfaceAudience.Private
+public class VolumeInfo implements Comparable<VolumeInfo> {
+
+  static final String VOLUME_INFO = "VOLUME_INFO_FILTER";
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(VolumeInfo.class);
+  private static final ObjectWriter WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
+
+    FilterProvider filters = new SimpleFilterProvider()
+        .addFilter(VOLUME_INFO, SimpleBeanPropertyFilter
+            .serializeAllExcept(ignorableFieldNames));
+    mapper.setVisibility(PropertyAccessor.FIELD,
+        JsonAutoDetect.Visibility.ANY);
+    mapper.addMixIn(Object.class, MixIn.class);
+
+    mapper.setFilterProvider(filters);
+    WRITER = mapper.writerWithDefaultPrettyPrinter();
+  }
+
+  /**
+   * Custom Json Filter Class.
+   */
+  @JsonFilter(VOLUME_INFO)
+  class MixIn {
+  }
+  private VolumeOwner owner;
+  private OzoneQuota quota;
+  private String volumeName;
+  private String createdOn;
+  private String createdBy;
+
+  private long bytesUsed;
+  private long bucketCount;
+
+
+  /**
+   * Constructor for VolumeInfo.
+   *
+   * @param volumeName - Name of the Volume
+   * @param createdOn - Date String
+   * @param createdBy - Person who created it
+   */
+  public VolumeInfo(String volumeName, String createdOn, String createdBy) {
+    this.createdOn = createdOn;
+    this.volumeName = volumeName;
+    this.createdBy = createdBy;
+  }
+
+  /**
+   * Constructor for VolumeInfo.
+   */
+  public VolumeInfo() {
+  }
+
+  /**
+   * Returns the name of the person who created this volume.
+   *
+   * @return Name of Admin who created this
+   */
+  public String getCreatedBy() {
+    return createdBy;
+  }
+
+  /**
+   * Sets the user name of the person who created this volume.
+   *
+   * @param createdBy - UserName
+   */
+  public void setCreatedBy(String createdBy) {
+    this.createdBy = createdBy;
+  }
+
+  /**
+   * Gets the date on which this volume was created.
+   *
+   * @return - Date String
+   */
+  public String getCreatedOn() {
+    return createdOn;
+  }
+
+  /**
+   * Sets the date string.
+   *
+   * @param createdOn - Date String
+   */
+  public void setCreatedOn(String createdOn) {
+    this.createdOn = createdOn;
+  }
+
+  /**
+   * Returns the owner info.
+   *
+   * @return - OwnerInfo
+   */
+  public VolumeOwner getOwner() {
+    return owner;
+  }
+
+  /**
+   * Sets the owner.
+   *
+   * @param owner - OwnerInfo
+   */
+  public void setOwner(VolumeOwner owner) {
+    this.owner = owner;
+  }
+
+  /**
+   * Returns the quota information on a volume.
+   *
+   * @return Quota
+   */
+  public OzoneQuota getQuota() {
+    return quota;
+  }
+
+  /**
+   * Sets the quota info.
+   *
+   * @param quota - Quota Info
+   */
+  public void setQuota(OzoneQuota quota) {
+    this.quota = quota;
+  }
+
+  /**
+   * Gets the volume name.
+   *
+   * @return - Volume Name
+   */
+  public String getVolumeName() {
+    return volumeName;
+  }
+
+  /**
+   * Sets the volume name.
+   *
+   * @param volumeName - Volume Name
+   */
+  public void setVolumeName(String volumeName) {
+    this.volumeName = volumeName;
+  }
+
+  /**
+   * Returns a JSON string of this object, after stripping out
+   * bytesUsed and bucketCount.
+   *
+   * @return String - json string
+   * @throws IOException
+   */
+  public String toJsonString() throws IOException {
+    return WRITER.writeValueAsString(this);
+  }
+
+  /**
+   * When we serialize a VolumeInfo to our database
+   * we use all fields. However, toJsonString strips out
+   * bytesUsed and bucketCount from the volume info.
+   *
+   * @return Json String
+   *
+   * @throws IOException
+   */
+  public String toDBString() throws IOException {
+    return JsonUtils.toJsonString(this);
+  }
+
+
+  /**
+   * Comparable Interface.
+   * @param o VolumeInfo Object.
+   * @return Result of comparison
+   */
+  @Override
+  public int compareTo(VolumeInfo o) {
+    return this.volumeName.compareTo(o.getVolumeName());
+  }
+
+  /**
+   * Gets the number of bytesUsed by this volume.
+   *
+   * @return long - Bytes used
+   */
+  public long getBytesUsed() {
+    return bytesUsed;
+  }
+
+  /**
+   * Sets number of bytesUsed by this volume.
+   *
+   * @param bytesUsed - Number of bytesUsed
+   */
+  public void setBytesUsed(long bytesUsed) {
+    this.bytesUsed = bytesUsed;
+  }
+
+  /**
+   * Returns VolumeInfo class from json string.
+   *
+   * @param data - Json String
+   *
+   * @return VolumeInfo
+   *
+   * @throws IOException
+   */
+  public static VolumeInfo parse(String data) throws IOException {
+    return READER.readValue(data);
+  }
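+
+  // Round-trip sketch (assuming toDBString() and parse() use compatible field
+  // names; the date string below is only an example value):
+  //
+  //   VolumeInfo vol = new VolumeInfo("vol1", "Thu, 19 Apr 2018 07:00:00 GMT",
+  //       "hdfs");
+  //   VolumeInfo copy = VolumeInfo.parse(vol.toDBString());
+  //   // copy.equals(vol) holds, since equality is based on the volume name.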
+
+  /**
+   * Indicates whether some other object is "equal to" this one.
+   *
+   * @param obj the reference object with which to compare.
+   *
+   * @return {@code true} if this object is the same as the obj
+   * argument; {@code false} otherwise.
+   */
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    VolumeInfo otherInfo = (VolumeInfo) obj;
+    return otherInfo.getVolumeName().equals(this.getVolumeName());
+  }
+
+  /**
+   * Returns a hash code value for the object. This method is
+   * supported for the benefit of hash tables such as those provided by
+   * HashMap.
+   * @return a hash code value for this object.
+   *
+   * @see Object#equals(Object)
+   * @see System#identityHashCode
+   */
+  @Override
+  public int hashCode() {
+    return getVolumeName().hashCode();
+  }
+
+  /**
+   * Total number of buckets under this volume.
+   *
+   * @return - bucketCount
+   */
+  public long getBucketCount() {
+    return bucketCount;
+  }
+
+  /**
+   * Sets the buckets count.
+   *
+   * @param bucketCount - Bucket Count
+   */
+  public void setBucketCount(long bucketCount) {
+    this.bucketCount = bucketCount;
+  }
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeOwner.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeOwner.java
new file mode 100644
index 0000000..afb04605
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/VolumeOwner.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.response;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Volume Owner represents the owner of a volume.
+ *
+ * This is a class instead of a string since we might need to extend this class
+ * to support other forms of authentication.
+ */
+@InterfaceAudience.Private
+public class VolumeOwner {
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  private String name;
+
+  /**
+   * Constructor for VolumeOwner.
+   *
+   * @param name - name of the User
+   */
+  public VolumeOwner(String name) {
+    this.name = name;
+  }
+
+  /**
+   * Constructs Volume Owner.
+   */
+  public VolumeOwner() {
+    name = null;
+  }
+
+  /**
+   * Returns the user name.
+   *
+   * @return Name
+   */
+  public String getName() {
+    return name;
+  }
+
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/package-info.java
new file mode 100644
index 0000000..3bf66c8
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/response/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * This package contains the response classes for the Ozone REST protocol.
+ */
+package org.apache.hadoop.ozone.web.response;
\ No newline at end of file
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
new file mode 100644
index 0000000..22fff56
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.utils;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Locale;
+import java.util.TimeZone;
+import java.util.UUID;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Set of Utility functions used in ozone.
+ */
+@InterfaceAudience.Private
+public final class OzoneUtils {
+
+  public static final String ENCODING_NAME = "UTF-8";
+  public static final Charset ENCODING = Charset.forName(ENCODING_NAME);
+
+  private OzoneUtils() {
+    // Never constructed
+  }
+
+  /**
+   * Date format used in Ozone. The formatter is wrapped in a ThreadLocal
+   * so that it is thread safe to use.
+   */
+  private static final ThreadLocal<SimpleDateFormat> DATE_FORMAT =
+      new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      SimpleDateFormat format = new SimpleDateFormat(
+          OzoneConsts.OZONE_DATE_FORMAT, Locale.US);
+      format.setTimeZone(TimeZone.getTimeZone(OzoneConsts.OZONE_TIME_ZONE));
+
+      return format;
+    }
+  };
+
+  /**
+   * Verifies that max key length is a valid value.
+   *
+   * @param length
+   *          The max key length to be validated
+   *
+   * @throws IllegalArgumentException
+   */
+  public static void verifyMaxKeyLength(String length)
+      throws IllegalArgumentException {
+    int maxKey = 0;
+    try {
+      maxKey = Integer.parseInt(length);
+    } catch (NumberFormatException nfe) {
+      throw new IllegalArgumentException(
+          "Invalid max key length, the vaule should be digital.");
+    }
+
+    if (maxKey <= 0) {
+      throw new IllegalArgumentException(
+          "Invalid max key length, the vaule should be a positive number.");
+    }
+  }
+
+  /**
+   * Returns a random Request ID.
+   *
+   * Request ID is returned to the client as well as flows through the system
+   * facilitating debugging on why a certain request failed.
+   *
+   * @return String random request ID
+   */
+  public static String getRequestID() {
+    return UUID.randomUUID().toString();
+  }
+
+  /**
+   * Return host name if possible.
+   *
+   * @return Host Name or localhost
+   */
+  public static String getHostName() {
+    String host = "localhost";
+    try {
+      host = InetAddress.getLocalHost().getHostName();
+    } catch (UnknownHostException e) {
+      // Ignore the error
+    }
+    return host;
+  }
+
+  /**
+   * Get the path for datanode id file.
+   *
+   * @param conf - Configuration
+   * @return the path of datanode id as string
+   */
+  public static String getDatanodeIdFilePath(Configuration conf) {
+    return HddsUtils.getDatanodeIdFilePath(conf);
+  }
+
+  /**
+   * Converts a time in milliseconds to the human readable date format used
+   * in Ozone.
+   * @return a human readable string for the input time
+   */
+  public static String formatTime(long millis) {
+    return DATE_FORMAT.get().format(millis);
+  }
+
+  /**
+   * Converts a time in the Ozone date format to milliseconds.
+   * @return time in milliseconds
+   */
+  public static long formatDate(String date) throws ParseException {
+    Preconditions.checkNotNull(date, "Date string should not be null.");
+    return DATE_FORMAT.get().parse(date).getTime();
+  }
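+
+  // Round-trip sketch: formatTime and formatDate are intended to be inverses
+  // for the Ozone date format (modulo any precision dropped by the textual
+  // form).
+  //
+  //   String text = OzoneUtils.formatTime(System.currentTimeMillis());
+  //   long millis = OzoneUtils.formatDate(text);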
+
+  public static boolean isOzoneEnabled(Configuration conf) {
+    return HddsUtils.isHddsEnabled(conf);
+  }
+
+
+  /**
+   * Verifies that a bucket or volume name is a valid DNS name.
+   *
+   * @param resName Bucket or volume Name to be validated
+   *
+   * @throws IllegalArgumentException
+   */
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
+
+    if (resName == null) {
+      throw new IllegalArgumentException("Bucket or Volume name is null");
+    }
+
+    if ((resName.length() < OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH) ||
+        (resName.length() > OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH)) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume length is illegal, " +
+              "valid length is 3-63 characters");
+    }
+
+    if ((resName.charAt(0) == '.') || (resName.charAt(0) == '-')) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot start with a period or dash");
+    }
+
+    if ((resName.charAt(resName.length() - 1) == '.') ||
+        (resName.charAt(resName.length() - 1) == '-')) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot end with a period or dash");
+    }
+
+    boolean isIPv4 = true;
+    char prev = (char) 0;
+
+    for (int index = 0; index < resName.length(); index++) {
+      char currChar = resName.charAt(index);
+
+      if (currChar != '.') {
+        isIPv4 = ((currChar >= '0') && (currChar <= '9')) && isIPv4;
+      }
+
+      if (currChar > 'A' && currChar < 'Z') {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name does not support uppercase characters");
+      }
+
+      if ((currChar != '.') && (currChar != '-')) {
+        if ((currChar < '0') || (currChar > '9' && currChar < 'a') ||
+            (currChar > 'z')) {
+          throw new IllegalArgumentException("Bucket or Volume name has an " +
+              "unsupported character : " +
+              currChar);
+        }
+      }
+
+      if ((prev == '.') && (currChar == '.')) {
+        throw new IllegalArgumentException("Bucket or Volume name should not " +
+            "have two contiguous periods");
+      }
+
+      if ((prev == '-') && (currChar == '.')) {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name should not have period after dash");
+      }
+
+      if ((prev == '.') && (currChar == '-')) {
+        throw new IllegalArgumentException(
+            "Bucket or Volume name should not have dash after period");
+      }
+      prev = currChar;
+    }
+
+    if (isIPv4) {
+      throw new IllegalArgumentException(
+          "Bucket or Volume name cannot be an IPv4 address or all numeric");
+    }
+  }
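+
+  // Illustrative examples of the rules enforced above (a sketch, not a test):
+  //
+  //   verifyResourceName("my-bucket.logs");   // passes
+  //   verifyResourceName("My-Bucket");        // fails: uppercase characters
+  //   verifyResourceName("ab");               // fails: shorter than 3 characters
+  //   verifyResourceName("bucket-");          // fails: ends with a dash
+  //   verifyResourceName("10.0.0.1");         // fails: looks like an IPv4 address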
+
+}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java
new file mode 100644
index 0000000..178157f
--- /dev/null
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.utils;
diff --git a/hadoop-ozone/common/src/main/proto/KeySpaceManagerProtocol.proto b/hadoop-ozone/common/src/main/proto/KeySpaceManagerProtocol.proto
new file mode 100644
index 0000000..a6026f1
--- /dev/null
+++ b/hadoop-ozone/common/src/main/proto/KeySpaceManagerProtocol.proto
@@ -0,0 +1,457 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These .proto interfaces are private and unstable.
+ * Please see http://wiki.apache.org/hadoop/Compatibility
+ * for what changes are allowed for an *unstable* .proto interface.
+ */
+
+option java_package = "org.apache.hadoop.ozone.protocol.proto";
+option java_outer_classname = "KeySpaceManagerProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+package hadoop.ozone;
+
+/**
+This file contains the protocol used to communicate with the
+Ozone Key Space Manager (KSM). The KSM manages the namespace for Ozone,
+much like the Namenode does for HDFS.
+*/
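+
+/**
+Illustrative call flow (a sketch, not a normative sequence): a client creates a
+volume (createVolume) and a bucket (createBucket), opens a key for writing with
+createKey, requests additional blocks with allocateBlock as needed, and finally
+makes the key visible with commitKey. The ID returned in LocateKeyResponse is
+presumably the clientID carried by the follow-up AllocateBlockRequest and
+CommitKeyRequest messages.
+*/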
+
+import "hdfs.proto";
+import "hdds.proto";
+
+enum Status {
+    OK = 1;
+    VOLUME_NOT_UNIQUE = 2;
+    VOLUME_NOT_FOUND = 3;
+    VOLUME_NOT_EMPTY = 4;
+    VOLUME_ALREADY_EXISTS = 5;
+    USER_NOT_FOUND = 6;
+    USER_TOO_MANY_VOLUMES = 7;
+    BUCKET_NOT_FOUND = 8;
+    BUCKET_NOT_EMPTY = 9;
+    BUCKET_ALREADY_EXISTS = 10;
+    KEY_ALREADY_EXISTS = 11;
+    KEY_NOT_FOUND = 12;
+    ACCESS_DENIED = 13;
+    INTERNAL_ERROR = 14;
+}
+
+
+message VolumeInfo {
+    required string adminName = 1;
+    required string ownerName = 2;
+    required string volume = 3;
+    optional uint64 quotaInBytes = 4;
+    repeated hadoop.hdds.KeyValue metadata = 5;
+    repeated OzoneAclInfo volumeAcls = 6;
+    required uint64 creationTime = 7;
+}
+
+/**
+    Creates a volume
+*/
+message CreateVolumeRequest {
+    required VolumeInfo volumeInfo = 1;
+}
+
+message CreateVolumeResponse {
+
+    required Status status = 1;
+}
+
+message VolumeList {
+    repeated string volumeNames = 1;
+}
+
+/**
+    Changes the Volume Properties -- like ownership and quota for a volume.
+*/
+message SetVolumePropertyRequest {
+    required string volumeName = 1;
+    optional string ownerName = 2;
+    optional uint64 quotaInBytes = 3;
+}
+
+message SetVolumePropertyResponse {
+    required Status status = 1;
+}
+
+/**
+ * Checks if the user has specified permissions for the volume
+ */
+message CheckVolumeAccessRequest {
+    required string volumeName = 1;
+    required OzoneAclInfo userAcl = 2;
+}
+
+message CheckVolumeAccessResponse {
+
+    required Status status = 1;
+}
+
+
+/**
+    Returns information about a volume.
+*/
+
+message InfoVolumeRequest {
+    required string volumeName = 1;
+}
+
+message InfoVolumeResponse {
+    required Status status = 1;
+    optional VolumeInfo volumeInfo = 2;
+
+}
+
+/**
+    Deletes an existing volume.
+*/
+message DeleteVolumeRequest {
+    required string volumeName = 1;
+}
+
+message DeleteVolumeResponse {
+    required Status status = 1;
+}
+
+
+/**
+    List Volumes -- List all volumes in the cluster or by user.
+*/
+
+message ListVolumeRequest {
+    enum Scope {
+        USER_VOLUMES = 1;   // User volumes -- called by user
+        VOLUMES_BY_USER = 2; // User volumes - called by Admin
+        VOLUMES_BY_CLUSTER = 3; // All volumes in the cluster
+    }
+    required Scope scope = 1;
+    optional string userName = 2;
+    optional string prefix = 3;
+    optional string prevKey = 4;
+    optional uint32 maxKeys = 5;
+}
+
+message ListVolumeResponse {
+    required Status status = 1;
+    repeated VolumeInfo volumeInfo = 2;
+}
+
+message BucketInfo {
+    required string volumeName = 1;
+    required string bucketName = 2;
+    repeated OzoneAclInfo acls = 3;
+    required bool isVersionEnabled = 4 [default = false];
+    required hadoop.hdfs.StorageTypeProto storageType = 5 [default = DISK];
+    required uint64 creationTime = 6;
+}
+
+message BucketArgs {
+    required string volumeName = 1;
+    required string bucketName = 2;
+    repeated OzoneAclInfo addAcls = 3;
+    repeated OzoneAclInfo removeAcls = 4;
+    optional bool isVersionEnabled = 5;
+    optional hadoop.hdfs.StorageTypeProto storageType = 6;
+}
+
+message OzoneAclInfo {
+    enum OzoneAclType {
+        USER = 1;
+        GROUP = 2;
+        WORLD = 3;
+    }
+    enum OzoneAclRights {
+        READ = 1;
+        WRITE = 2;
+        READ_WRITE = 3;
+    }
+    required OzoneAclType type = 1;
+    required string name = 2;
+    required OzoneAclRights rights = 3;
+}
+
+message CreateBucketRequest {
+    required BucketInfo bucketInfo = 1;
+}
+
+message CreateBucketResponse {
+    required Status status = 1;
+}
+
+message InfoBucketRequest {
+    required string volumeName = 1;
+    required string bucketName = 2;
+}
+
+message InfoBucketResponse {
+    required Status status = 1;
+    optional BucketInfo bucketInfo = 2;
+}
+
+message ListBucketsRequest {
+    required string volumeName = 1;
+    optional string startKey = 2;
+    optional string prefix = 3;
+    optional int32 count = 4;
+}
+
+message ListBucketsResponse {
+    required Status status = 1;
+    repeated BucketInfo bucketInfo = 2;
+}
+
+message KeyArgs {
+    required string volumeName = 1;
+    required string bucketName = 2;
+    required string keyName = 3;
+    optional uint64 dataSize = 4;
+    optional hadoop.hdds.ReplicationType type = 5;
+    optional hadoop.hdds.ReplicationFactor factor = 6;
+}
+
+message KeyLocation {
+    required string blockID = 1;
+    required string containerName = 2;
+    required bool shouldCreateContainer = 3;
+    required uint64 offset = 4;
+    required uint64 length = 5;
+    // indicates at which version this block was created.
+    optional uint64 createVersion = 6;
+}
+
+message KeyLocationList {
+    optional uint64 version = 1;
+    repeated KeyLocation keyLocations = 2;
+}
+
+message KeyInfo {
+    required string volumeName = 1;
+    required string bucketName = 2;
+    required string keyName = 3;
+    required uint64 dataSize = 4;
+    repeated KeyLocationList keyLocationList = 5;
+    required uint64 creationTime = 6;
+    required uint64 modificationTime = 7;
+    optional uint64 latestVersion = 8;
+}
+
+message LocateKeyRequest {
+    required KeyArgs keyArgs = 1;
+}
+
+message LocateKeyResponse {
+    required Status status = 1;
+    optional KeyInfo keyInfo = 2;
+    // the client's follow-up requests may carry this ID for stateful
+    // operations (similar to a cookie).
+    optional uint32 ID = 3;
+    // TODO : allow specifying a particular version to read.
+    optional uint64 openVersion = 4;
+}
+
+message SetBucketPropertyRequest {
+    required BucketArgs bucketArgs = 1;
+}
+
+message SetBucketPropertyResponse {
+    required Status status = 1;
+}
+
+message DeleteBucketRequest {
+    required string volumeName = 1;
+    required string bucketName = 2;
+}
+
+message DeleteBucketResponse {
+    required Status status = 1;
+}
+
+message ListKeysRequest {
+    required string volumeName = 1;
+    required string bucketName = 2;
+    optional string startKey = 3;
+    optional string prefix = 4;
+    optional int32 count = 5;
+}
+
+message ListKeysResponse {
+    required Status status = 1;
+    repeated KeyInfo keyInfo = 2;
+}
+
+message AllocateBlockRequest {
+    required KeyArgs keyArgs = 1;
+    required uint32 clientID = 2;
+}
+
+message AllocateBlockResponse {
+    required Status status = 1;
+    required KeyLocation keyLocation = 2;
+}
+
+message CommitKeyRequest {
+    required KeyArgs keyArgs = 1;
+    required uint32 clientID = 2;
+}
+
+message CommitKeyResponse {
+    required Status status = 1;
+}
+
+message ServiceListRequest {
+}
+
+message ServiceListResponse {
+    required Status status = 1;
+    repeated ServiceInfo serviceInfo = 2;
+}
+
+message ServicePort {
+    enum Type {
+        RPC = 1;
+        HTTP = 2;
+        HTTPS = 3;
+        RATIS = 4;
+    };
+    required Type type = 1;
+    required uint32 value = 2;
+}
+
+message ServiceInfo {
+    required hadoop.hdds.NodeType nodeType = 1;
+    required string hostname = 2;
+    repeated ServicePort servicePorts = 3;
+}
+
+/**
+ The KSM service that takes care of Ozone namespace.
+*/
+service KeySpaceManagerService {
+
+    /**
+        Creates a Volume.
+    */
+    rpc createVolume(CreateVolumeRequest)
+        returns(CreateVolumeResponse);
+
+    /**
+        Allows modification of volume properties.
+    */
+    rpc setVolumeProperty(SetVolumePropertyRequest)
+        returns (SetVolumePropertyResponse);
+
+    /**
+        Checks if the specified volume is accessible by the specified user.
+    */
+    rpc checkVolumeAccess(CheckVolumeAccessRequest)
+        returns (CheckVolumeAccessResponse);
+
+    /**
+        Gets Volume information.
+    */
+    rpc infoVolume(InfoVolumeRequest)
+        returns(InfoVolumeResponse);
+    /**
+        Deletes a volume if it is empty.
+    */
+    rpc deleteVolume(DeleteVolumeRequest)
+        returns (DeleteVolumeResponse);
+
+    /**
+        Lists Volumes
+    */
+    rpc listVolumes(ListVolumeRequest)
+        returns (ListVolumeResponse);
+
+    /**
+        Creates a Bucket.
+    */
+    rpc createBucket(CreateBucketRequest)
+        returns(CreateBucketResponse);
+
+    /**
+        Get Bucket information.
+    */
+    rpc infoBucket(InfoBucketRequest)
+        returns(InfoBucketResponse);
+
+    /**
+        Sets bucket properties.
+    */
+    rpc setBucketProperty(SetBucketPropertyRequest)
+        returns(SetBucketPropertyResponse);
+
+    /**
+        Creates a key.
+    */
+    rpc createKey(LocateKeyRequest)
+        returns(LocateKeyResponse);
+
+    /**
+       Looks up an existing key.
+    */
+    rpc lookupKey(LocateKeyRequest)
+        returns(LocateKeyResponse);
+
+    /**
+       Delete an existing key.
+    */
+    rpc deleteKey(LocateKeyRequest)
+        returns(LocateKeyResponse);
+
+    /**
+       Deletes a bucket from volume if it is empty.
+    */
+    rpc deleteBucket(DeleteBucketRequest)
+        returns (DeleteBucketResponse);
+
+    /**
+       List Buckets.
+    */
+    rpc listBuckets(ListBucketsRequest)
+    returns(ListBucketsResponse);
+
+    /**
+       List Keys.
+    */
+    rpc listKeys(ListKeysRequest)
+    returns(ListKeysResponse);
+
+    /**
+      Commit a key.
+    */
+    rpc commitKey(CommitKeyRequest)
+    returns(CommitKeyResponse);
+
+    /**
+      Allocate a new block for a key.
+    */
+    rpc allocateBlock(AllocateBlockRequest)
+    returns(AllocateBlockResponse);
+
+    /**
+      Returns the list of Ozone services with their configuration details.
+    */
+    rpc getServiceList(ServiceListRequest)
+    returns(ServiceListResponse);
+}
diff --git a/hadoop-ozone/common/src/main/shellprofile.d/hadoop-ozone.sh b/hadoop-ozone/common/src/main/shellprofile.d/hadoop-ozone.sh
new file mode 100644
index 0000000..2cd2bb3
--- /dev/null
+++ b/hadoop-ozone/common/src/main/shellprofile.d/hadoop-ozone.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [[ "${HADOOP_SHELL_EXECNAME}" = ozone ]]; then
+   hadoop_add_profile ozone
+fi
+
+
+## @description  Profile for hdds/ozone components.
+## @audience     private
+## @stability    evolving
+function _ozone_hadoop_classpath
+{
+  #
+  # get all of the ozone jars+config in the path
+  #
+
+  if [[ -d "${HADOOP_HDFS_HOME}/${HDDS_DIR}/webapps" ]]; then
+    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_DIR}"
+  fi
+
+  if [[ -d "${HADOOP_HDFS_HOME}/${OZONE_DIR}/webapps" ]]; then
+    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_DIR}"
+  fi
+
+  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_LIB_JARS_DIR}"'/*'
+  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDDS_DIR}"'/*'
+  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_LIB_JARS_DIR}"'/*'
+  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${OZONE_DIR}"'/*'
+
+}
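+
+# Note: the hadoop shell framework is expected to invoke a function named
+# "_<profile>_hadoop_classpath" for every profile registered through
+# hadoop_add_profile, which is how _ozone_hadoop_classpath above gets picked
+# up when the final CLASSPATH is assembled.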
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestBucketInfo.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestBucketInfo.java
new file mode 100644
index 0000000..2e69922
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestBucketInfo.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+/**
+ * Test Ozone Bucket Info operation.
+ */
+public class TestBucketInfo {
+  @Test
+  public void testBucketInfoJson() throws IOException {
+    BucketInfo bucketInfo = new BucketInfo("volumeName", "bucketName");
+    String bucketInfoString = bucketInfo.toJsonString();
+    BucketInfo newBucketInfo = BucketInfo.parse(bucketInfoString);
+    assertEquals(bucketInfo, newBucketInfo);
+  }
+
+  @Test
+  public void testBucketInfoDBString() throws IOException {
+    BucketInfo bucketInfo = new BucketInfo("volumeName", "bucketName");
+    String bucketInfoString = bucketInfo.toDBString();
+    BucketInfo newBucketInfo = BucketInfo.parse(bucketInfoString);
+    assertEquals(bucketInfo, newBucketInfo);
+  }
+
+  @Test
+  public void testBucketInfoAddAcls() throws IOException {
+    BucketInfo bucketInfo = new BucketInfo("volumeName", "bucketName");
+    String bucketInfoString = bucketInfo.toDBString();
+    BucketInfo newBucketInfo = BucketInfo.parse(bucketInfoString);
+    assertEquals(bucketInfo, newBucketInfo);
+    List<OzoneAcl> aclList = new LinkedList<>();
+
+    aclList.add(OzoneAcl.parseAcl("user:bilbo:r"));
+    aclList.add(OzoneAcl.parseAcl("user:samwise:rw"));
+    newBucketInfo.setAcls(aclList);
+
+    assertNotNull(newBucketInfo.getAcls());
+    assertEquals(2, newBucketInfo.getAcls().size());
+  }
+
+
+  @Test
+  public void testBucketInfoVersionAndType() throws IOException {
+    BucketInfo bucketInfo = new BucketInfo("volumeName", "bucketName");
+    bucketInfo.setVersioning(OzoneConsts.Versioning.ENABLED);
+    bucketInfo.setStorageType(StorageType.DISK);
+
+    String bucketInfoString = bucketInfo.toDBString();
+
+    BucketInfo newBucketInfo = BucketInfo.parse(bucketInfoString);
+    assertEquals(bucketInfo, newBucketInfo);
+  }
+
+}
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestQuota.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestQuota.java
new file mode 100644
index 0000000..d777d0c
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestQuota.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test Ozone Volume Quota.
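+ *
+ * As exercised below, a valid quota string is a non-negative number followed
+ * by a unit (MB, GB, TB or BYTES, with lowercase "bytes" also accepted),
+ * optionally separated from the number and surrounded by whitespace; the
+ * literal "remove" is handled separately through isRemove().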
+ */
+public class TestQuota {
+  @Test
+  public void testParseQuota() {
+    HashMap<String, Boolean> testMatrix;
+    testMatrix = new HashMap<String, Boolean>();
+
+    testMatrix.put("10TB", Boolean.TRUE);
+    testMatrix.put("1 TB", Boolean.TRUE);
+    testMatrix.put("0MB", Boolean.TRUE);
+    testMatrix.put("0 TB", Boolean.TRUE);
+    testMatrix.put("    1000MB   ", Boolean.TRUE);
+
+    testMatrix.put("    1000MBMB   ", Boolean.FALSE);
+    testMatrix.put("    1000MB00   ", Boolean.FALSE);
+    testMatrix.put("1000ZMB", Boolean.FALSE);
+    testMatrix.put("MB1000", Boolean.FALSE);
+    testMatrix.put("9999", Boolean.FALSE);
+    testMatrix.put("1", Boolean.FALSE);
+    testMatrix.put("remove", Boolean.FALSE);
+    testMatrix.put("1UNDEFINED", Boolean.FALSE);
+    testMatrix.put(null, Boolean.FALSE);
+    testMatrix.put("", Boolean.FALSE);
+    testMatrix.put("-1000MB", Boolean.FALSE);
+    testMatrix.put("1024 bytes", Boolean.TRUE);
+    testMatrix.put("1bytes", Boolean.TRUE);
+    testMatrix.put("0bytes", Boolean.TRUE);
+    testMatrix.put("10000 BYTES", Boolean.TRUE);
+    testMatrix.put("BYTESbytes", Boolean.FALSE);
+    testMatrix.put("bytes", Boolean.FALSE);
+
+    Set<String> keys = testMatrix.keySet();
+    for (String key : keys) {
+      if (testMatrix.get(key)) {
+        OzoneQuota.parseQuota(key);
+      } else {
+        try {
+          OzoneQuota.parseQuota(key);
+          // should never get here since parseQuota will throw
+          fail("An exception was expected for quota string: " + key);
+        } catch (IllegalArgumentException e) {
+          // expected: invalid quota string
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testVerifyQuota() {
+    OzoneQuota qt = OzoneQuota.parseQuota("10TB");
+    assertEquals(qt.getSize(), 10);
+    assertEquals(qt.getUnit(), OzoneQuota.Units.TB);
+    assertEquals(qt.sizeInBytes(), 10L * (1024L * 1024L * 1024L * 1024L));
+
+    qt = OzoneQuota.parseQuota("10MB");
+    assertEquals(qt.getSize(), 10);
+    assertEquals(qt.getUnit(), OzoneQuota.Units.MB);
+    assertEquals(qt.sizeInBytes(), 10L * (1024L * 1024L));
+
+    qt = OzoneQuota.parseQuota("10GB");
+    assertEquals(qt.getSize(), 10);
+    assertEquals(qt.getUnit(), OzoneQuota.Units.GB);
+    assertEquals(qt.sizeInBytes(), 10L * (1024L * 1024L * 1024L));
+
+    qt = OzoneQuota.parseQuota("10BYTES");
+    assertEquals(qt.getSize(), 10);
+    assertEquals(qt.getUnit(), OzoneQuota.Units.BYTES);
+    assertEquals(qt.sizeInBytes(), 10L);
+
+    OzoneQuota emptyQuota = new OzoneQuota();
+    assertEquals(emptyQuota.sizeInBytes(), -1L);
+    assertEquals(emptyQuota.getSize(), 0);
+    assertEquals(emptyQuota.getUnit(), OzoneQuota.Units.UNDEFINED);
+  }
+
+  @Test
+  public void testVerifyRemove() {
+    assertTrue(OzoneQuota.isRemove("remove"));
+    assertFalse(OzoneQuota.isRemove("not remove"));
+    assertFalse(OzoneQuota.isRemove(null));
+  }
+}
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestUtils.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestUtils.java
new file mode 100644
index 0000000..d3f8f5e
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestUtils.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.getRequestID;
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.verifyResourceName;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test Ozone Utility operations like verifying resource name.
+ */
+public class TestUtils {
+
+  /**
+   * Tests if the bucket name handling is correct.
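+   *
+   * The matrix below encodes the rules implied by these cases: lowercase
+   * letters, digits, '-' and '.' only; no leading or trailing '-' or '.';
+   * no consecutive dots, spaces, uppercase letters or IP-address-like
+   * names; and names that are very short (two characters) or longer than
+   * 63 characters are rejected.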
+   */
+  @Test
+  public void testValidBucketNames() {
+    HashMap<String, Boolean> testMatrix;
+    // Init the Table with Strings and Expected Return values
+    testMatrix = new HashMap<String, Boolean>();
+
+    testMatrix.put("bucket-.ozone.self", Boolean.FALSE);
+    testMatrix.put("bucket.-ozone.self", Boolean.FALSE);
+    testMatrix.put(".bucket.ozone.self", Boolean.FALSE);
+    testMatrix.put("bucket.ozone.self.", Boolean.FALSE);
+    testMatrix.put("bucket..ozone.self", Boolean.FALSE);
+    testMatrix.put("192.1.1.1", Boolean.FALSE);
+    testMatrix.put("ab", Boolean.FALSE);
+    testMatrix.put("bucket.ozone.self.this.is.a.really.long.name.that."
+        + "is.more.than.sixty.three.characters.long.for.sure", Boolean.FALSE);
+    testMatrix.put(null, Boolean.FALSE);
+    testMatrix.put("bucket@$", Boolean.FALSE);
+    testMatrix.put("BUCKET", Boolean.FALSE);
+    testMatrix.put("bucket .ozone.self", Boolean.FALSE);
+    testMatrix.put("       bucket.ozone.self", Boolean.FALSE);
+    testMatrix.put("bucket.ozone.self-", Boolean.FALSE);
+    testMatrix.put("-bucket.ozone.self", Boolean.FALSE);
+
+    testMatrix.put("bucket", Boolean.TRUE);
+    testMatrix.put("bucket.ozone.self", Boolean.TRUE);
+    testMatrix.put("bucket.ozone.self", Boolean.TRUE);
+    testMatrix.put("bucket-name.ozone.self", Boolean.TRUE);
+    testMatrix.put("bucket.1.ozone.self", Boolean.TRUE);
+
+    Set<String> keys = testMatrix.keySet();
+    for (String key : keys) {
+      if (testMatrix.get(key)) {
+
+        // For valid names there should be no exceptions at all
+        verifyResourceName(key);
+      } else {
+        try {
+          verifyResourceName(key);
+          // should never get here since verifyResourceName will throw
+          fail("An exception was expected for resource name: " + key);
+        } catch (IllegalArgumentException e) {
+          // expected: invalid resource name
+        }
+      }
+    }
+  }
+
+  /**
+   * Calls getRequestID many times and asserts that every value is
+   * different. Ideally this should also be run from parallel threads,
+   * but since the function under test has no external dependencies,
+   * this test is assumed to be good enough.
+   */
+  @Test
+  public void testRequestIDisRandom() {
+    HashSet<String> set = new HashSet<>();
+    for (int i = 0; i < 1000; i++) {
+      assertTrue(set.add(getRequestID()));
+    }
+  }
+}
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestVolumeStructs.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestVolumeStructs.java
new file mode 100644
index 0000000..b433be6
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/TestVolumeStructs.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.response.VolumeOwner;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test Ozone Volume info structure.
+ */
+public class TestVolumeStructs {
+
+  @Test
+  public void testVolumeInfoParse() throws IOException {
+    VolumeInfo volInfo =
+        new VolumeInfo("testvol", "Thu, Apr 9, 2015 10:23:45 GMT", "gandalf");
+    VolumeOwner owner = new VolumeOwner("bilbo");
+    volInfo.setOwner(owner);
+    String jString = volInfo.toJsonString();
+    VolumeInfo newVolInfo = VolumeInfo.parse(jString);
+
+    assertEquals(volInfo.toJsonString(), newVolInfo.toJsonString());
+  }
+
+  @Test
+  public void testVolumeInfoValue() throws IOException {
+    String createdOn = "Thu, Apr 9, 2015 10:23:45 GMT";
+    String createdBy = "gandalf";
+    VolumeInfo volInfo = new VolumeInfo("testvol", createdOn, createdBy);
+    assertEquals(volInfo.getCreatedBy(), createdBy);
+    assertEquals(volInfo.getCreatedOn(), createdOn);
+  }
+
+
+  @Test
+  public void testVolumeListParse() throws IOException {
+    ListVolumes list = new ListVolumes();
+    for (int x = 0; x < 100; x++) {
+      VolumeInfo volInfo = new VolumeInfo("testvol" + Integer.toString(x),
+          "Thu, Apr 9, 2015 10:23:45 GMT", "gandalf");
+      list.addVolume(volInfo);
+    }
+    list.sort();
+    String listString = list.toJsonString();
+    ListVolumes newList = ListVolumes.parse(listString);
+    assertEquals(list.toJsonString(), newList.toJsonString());
+  }
+}
diff --git a/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/package-info.java b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/package-info.java
new file mode 100644
index 0000000..ddbc30e
--- /dev/null
+++ b/hadoop-ozone/common/src/test/java/org/apache/hadoop/ozone/web/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+/**
+ * Unit tests of generic Ozone web app and REST utils.
+ */
+package org.apache.hadoop.ozone.web;
diff --git a/hadoop-ozone/integration-test/pom.xml b/hadoop-ozone/integration-test/pom.xml
new file mode 100644
index 0000000..95f243b
--- /dev/null
+++ b/hadoop-ozone/integration-test/pom.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-integration-test</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Integration test package</description>
+  <name>Apache Hadoop Ozone integration tests</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-ozone-manager</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-objectstore-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-tools</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-ozone-manager</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-generator-annprocess</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+  </dependencies>
+</project>
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManager.java
new file mode 100644
index 0000000..87d203e
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerStateManager.java
@@ -0,0 +1,300 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.NavigableSet;
+import java.util.Random;
+
+/**
+ * Tests for ContainerStateManager.
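+ *
+ * The lifecycle driven by these tests is: ALLOCATED -(CREATE)-> CREATING
+ * -(CREATED)-> OPEN -(FINALIZE)-> CLOSING -(CLOSE)-> CLOSED -(DELETE)->
+ * DELETING -(CLEANUP)-> DELETED, with CREATING -(TIMEOUT)-> DELETING as the
+ * failure path.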
+ */
+public class TestContainerStateManager {
+
+  private OzoneConfiguration conf;
+  private MiniOzoneCluster cluster;
+  private XceiverClientManager xceiverClientManager;
+  private StorageContainerManager scm;
+  private Mapping scmContainerMapping;
+  private ContainerStateManager containerStateManager;
+  private String containerOwner = "OZONE";
+
+
+  @Before
+  public void setup() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(1).build();
+    cluster.waitForClusterToBeReady();
+    xceiverClientManager = new XceiverClientManager(conf);
+    scm = cluster.getStorageContainerManager();
+    scmContainerMapping = scm.getScmContainerManager();
+    containerStateManager = scmContainerMapping.getStateManager();
+  }
+
+  @After
+  public void cleanUp() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testAllocateContainer() throws IOException {
+    // Allocate a container and verify the container info
+    String container1 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container1, containerOwner);
+    ContainerInfo info = containerStateManager
+        .getMatchingContainer(OzoneConsts.GB * 3, containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.ALLOCATED);
+    Assert.assertEquals(container1, info.getContainerName());
+    Assert.assertEquals(OzoneConsts.GB * 3, info.getAllocatedBytes());
+    Assert.assertEquals(containerOwner, info.getOwner());
+    Assert.assertEquals(xceiverClientManager.getType(),
+        info.getPipeline().getType());
+    Assert.assertEquals(xceiverClientManager.getFactor(),
+        info.getPipeline().getFactor());
+    Assert.assertEquals(HddsProtos.LifeCycleState.ALLOCATED, info.getState());
+
+    // Check there are two containers in ALLOCATED state after allocation
+    String container2 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container2, containerOwner);
+    int numContainers = containerStateManager
+        .getMatchingContainerIDs(containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.ALLOCATED).size();
+    Assert.assertEquals(2, numContainers);
+  }
+
+  @Test
+  public void testContainerStateManagerRestart() throws IOException {
+    // Allocate 5 containers in ALLOCATED state and 5 in CREATING state
+    String cname = "container" + RandomStringUtils.randomNumeric(5);
+    for (int i = 0; i < 10; i++) {
+      scm.allocateContainer(xceiverClientManager.getType(),
+          xceiverClientManager.getFactor(), cname + i, containerOwner);
+      if (i >= 5) {
+        scm.getScmContainerManager()
+            .updateContainerState(cname + i, HddsProtos.LifeCycleEvent.CREATE);
+      }
+    }
+
+    // New instance of ContainerStateManager should load all the containers in
+    // container store.
+    ContainerStateManager stateManager =
+        new ContainerStateManager(conf, scmContainerMapping
+        );
+    int containers = stateManager
+        .getMatchingContainerIDs(containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.ALLOCATED).size();
+    Assert.assertEquals(5, containers);
+    containers = stateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.CREATING).size();
+    Assert.assertEquals(5, containers);
+  }
+
+  @Test
+  public void testGetMatchingContainer() throws IOException {
+    String container1 = "container-01234";
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container1, containerOwner);
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATE);
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATED);
+
+    String container2 = "container-56789";
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container2, containerOwner);
+
+    ContainerInfo info = containerStateManager
+        .getMatchingContainer(OzoneConsts.GB * 3, containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.OPEN);
+    Assert.assertEquals(container1, info.getContainerName());
+
+    info = containerStateManager
+        .getMatchingContainer(OzoneConsts.GB * 3, containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.ALLOCATED);
+    Assert.assertEquals(container2, info.getContainerName());
+
+    scmContainerMapping.updateContainerState(container2,
+        HddsProtos.LifeCycleEvent.CREATE);
+    scmContainerMapping.updateContainerState(container2,
+        HddsProtos.LifeCycleEvent.CREATED);
+
+    // Space has already been allocated in container1, so container2 should
+    // now be chosen.
+    info = containerStateManager
+        .getMatchingContainer(OzoneConsts.GB * 3, containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.OPEN);
+    Assert.assertEquals(container2, info.getContainerName());
+  }
+
+  @Test
+  public void testUpdateContainerState() throws IOException {
+    NavigableSet<ContainerID> containerList = containerStateManager
+        .getMatchingContainerIDs(containerOwner,
+            xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+            HddsProtos.LifeCycleState.ALLOCATED);
+    int containers = containerList == null ? 0 : containerList.size();
+    Assert.assertEquals(0, containers);
+
+    // Allocate container1 and update its state from ALLOCATED -> CREATING ->
+    // OPEN -> CLOSING -> CLOSED -> DELETING -> DELETED
+    String container1 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container1, containerOwner);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.ALLOCATED).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATE);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.CREATING).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATED);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.OPEN).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping
+        .updateContainerState(container1, HddsProtos.LifeCycleEvent.FINALIZE);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.CLOSING).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping
+        .updateContainerState(container1, HddsProtos.LifeCycleEvent.CLOSE);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.CLOSED).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping
+        .updateContainerState(container1, HddsProtos.LifeCycleEvent.DELETE);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.DELETING).size();
+    Assert.assertEquals(1, containers);
+
+    scmContainerMapping
+        .updateContainerState(container1, HddsProtos.LifeCycleEvent.CLEANUP);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.DELETED).size();
+    Assert.assertEquals(1, containers);
+
+    // Allocate container2 and update its state from ALLOCATED -> CREATING ->
+    // DELETING
+    String container2 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container2, containerOwner);
+    scmContainerMapping.updateContainerState(container2,
+        HddsProtos.LifeCycleEvent.CREATE);
+    scmContainerMapping
+        .updateContainerState(container2, HddsProtos.LifeCycleEvent.TIMEOUT);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.DELETING).size();
+    Assert.assertEquals(1, containers);
+
+    // Allocate container3 and update its state from ALLOCATED -> CREATING ->
+    // OPEN -> CLOSING -> CLOSED
+    String container3 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container3, containerOwner);
+    scmContainerMapping.updateContainerState(container3,
+        HddsProtos.LifeCycleEvent.CREATE);
+    scmContainerMapping.updateContainerState(container3,
+        HddsProtos.LifeCycleEvent.CREATED);
+    scmContainerMapping.updateContainerState(container3,
+        HddsProtos.LifeCycleEvent.FINALIZE);
+    scmContainerMapping
+        .updateContainerState(container3, HddsProtos.LifeCycleEvent.CLOSE);
+    containers = containerStateManager.getMatchingContainerIDs(containerOwner,
+        xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+        HddsProtos.LifeCycleState.CLOSED).size();
+    Assert.assertEquals(1, containers);
+  }
+
+  @Test
+  public void testUpdatingAllocatedBytes() throws Exception {
+    String container1 = "container" + RandomStringUtils.randomNumeric(5);
+    scm.allocateContainer(xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), container1, containerOwner);
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATE);
+    scmContainerMapping.updateContainerState(container1,
+        HddsProtos.LifeCycleEvent.CREATED);
+
+    Random ran = new Random();
+    long allocatedSize = 0;
+    for (int i = 0; i < 5; i++) {
+      long size = Math.abs(ran.nextLong() % OzoneConsts.GB);
+      allocatedSize += size;
+      // trigger allocating bytes by calling getMatchingContainer
+      ContainerInfo info = containerStateManager
+          .getMatchingContainer(size, containerOwner,
+              xceiverClientManager.getType(), xceiverClientManager.getFactor(),
+              HddsProtos.LifeCycleState.OPEN);
+      Assert.assertEquals(container1, info.getContainerName());
+
+      ContainerMapping containerMapping =
+          (ContainerMapping)scmContainerMapping;
+      // Manually trigger a flush; this persists the allocated bytes value
+      // to disk.
+      containerMapping.flushContainerInfo();
+
+      Charset utf8 = Charset.forName("UTF-8");
+      // the persisted value should always be equal to allocated size.
+      byte[] containerBytes =
+          containerMapping.getContainerStore().get(container1.getBytes(utf8));
+      HddsProtos.SCMContainerInfo infoProto =
+          HddsProtos.SCMContainerInfo.PARSER.parseFrom(containerBytes);
+      ContainerInfo currentInfo = ContainerInfo.fromProtobuf(infoProto);
+      Assert.assertEquals(allocatedSize, currentInfo.getAllocatedBytes());
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneCluster.java
new file mode 100644
index 0000000..8c49f65
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneCluster.java
@@ -0,0 +1,322 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.ksm.KeySpaceManager;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.test.GenericTestUtils;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Interface used for MiniOzoneClusters.
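+ * <p>
+ * A rough usage sketch from a test, using only the methods declared in this
+ * interface (exception handling omitted):
+ * <pre>
+ *   OzoneConfiguration conf = new OzoneConfiguration();
+ *   MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
+ *       .setNumDatanodes(3)
+ *       .build();
+ *   cluster.waitForClusterToBeReady();
+ *   try {
+ *     OzoneClient client = cluster.getClient();
+ *     // exercise the cluster through the client here
+ *   } finally {
+ *     cluster.shutdown();
+ *   }
+ * </pre>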
+ */
+public interface MiniOzoneCluster {
+
+  /**
+   * Returns the configuration object associated with the MiniOzoneCluster.
+   *
+   * @return Configuration
+   */
+  Configuration getConf();
+
+  /**
+   * Waits for the cluster to be ready. This call blocks until all the
+   * configured {@link HddsDatanodeService} instances have registered with
+   * the {@link StorageContainerManager}.
+   *
+   * @throws TimeoutException In case of timeout
+   * @throws InterruptedException In case of interrupt while waiting
+   */
+  void waitForClusterToBeReady() throws TimeoutException, InterruptedException;
+
+  /**
+   * Waits until the cluster is out of chill mode.
+   *
+   * @throws TimeoutException In case of timeout
+   * @throws InterruptedException In case of interrupt while waiting
+   */
+  void waitTobeOutOfChillMode() throws TimeoutException, InterruptedException;
+
+  /**
+   * Returns {@link StorageContainerManager} associated with this
+   * {@link MiniOzoneCluster} instance.
+   *
+   * @return {@link StorageContainerManager} instance
+   */
+  StorageContainerManager getStorageContainerManager();
+
+  /**
+   * Returns {@link KeySpaceManager} associated with this
+   * {@link MiniOzoneCluster} instance.
+   *
+   * @return {@link KeySpaceManager} instance
+   */
+  KeySpaceManager getKeySpaceManager();
+
+  /**
+   * Returns the list of {@link HddsDatanodeService} which are part of this
+   * {@link MiniOzoneCluster} instance.
+   *
+   * @return List of {@link HddsDatanodeService}
+   */
+  List<HddsDatanodeService> getHddsDatanodes();
+
+  /**
+   * Returns an {@link OzoneClient} to access the {@link MiniOzoneCluster}.
+   *
+   * @return {@link OzoneClient}
+   * @throws IOException
+   */
+  OzoneClient getClient() throws IOException;
+
+  /**
+   * Returns an RPC-based {@link OzoneClient} to access the
+   * {@link MiniOzoneCluster}.
+   *
+   * @return {@link OzoneClient}
+   * @throws IOException
+   */
+  OzoneClient getRpcClient() throws IOException;
+
+  /**
+   * Returns a REST-based {@link OzoneClient} to access the
+   * {@link MiniOzoneCluster}.
+   *
+   * @return {@link OzoneClient}
+   * @throws IOException
+   */
+  OzoneClient getRestClient() throws IOException;
+
+  /**
+   * Returns StorageContainerLocationClient to communicate with
+   * {@link StorageContainerManager} associated with the MiniOzoneCluster.
+   *
+   * @return StorageContainerLocation Client
+   * @throws IOException
+   */
+  StorageContainerLocationProtocolClientSideTranslatorPB getStorageContainerLocationClient()
+      throws IOException;
+
+  /**
+   * Restarts StorageContainerManager instance.
+   *
+   * @throws IOException
+   */
+  void restartStorageContainerManager() throws IOException;
+
+  /**
+   * Restarts KeySpaceManager instance.
+   *
+   * @throws IOException
+   */
+  void restartKeySpaceManager() throws IOException;
+
+  /**
+   * Restart a particular HddsDatanode.
+   *
+   * @param i index of HddsDatanode in the MiniOzoneCluster
+   */
+  void restartHddsDatanode(int i);
+
+  /**
+   * Shutdown a particular HddsDatanode.
+   *
+   * @param i index of HddsDatanode in the MiniOzoneCluster
+   */
+  void shutdownHddsDatanode(int i);
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  void shutdown();
+
+  /**
+   * Returns the Builder to construct MiniOzoneCluster.
+   *
+   * @param conf OzoneConfiguration
+   *
+   * @return MiniOzoneCluster builder
+   */
+  static Builder newBuilder(OzoneConfiguration conf) {
+    return new MiniOzoneClusterImpl.Builder(conf);
+  }
+
+  /**
+   * Builder class for MiniOzoneCluster.
+   */
+  abstract class Builder {
+
+    protected static final int DEFAULT_HB_INTERVAL_MS = 1000;
+    protected static final int DEFAULT_HB_PROCESSOR_INTERVAL_MS = 100;
+
+    protected final OzoneConfiguration conf;
+    protected final String path;
+
+    protected String clusterId;
+
+    protected Optional<Boolean> enableTrace = Optional.of(false);
+    protected Optional<Integer> hbInterval = Optional.empty();
+    protected Optional<Integer> hbProcessorInterval = Optional.empty();
+    protected Optional<String> scmId = Optional.empty();
+    protected Optional<String> ksmId = Optional.empty();
+
+    protected Boolean ozoneEnabled = true;
+    protected Boolean randomContainerPort = true;
+
+    // Use a relatively small number of handlers for testing
+    protected int numOfKsmHandlers = 20;
+    protected int numOfScmHandlers = 20;
+    protected int numOfDatanodes = 1;
+
+    protected Builder(OzoneConfiguration conf) {
+      this.conf = conf;
+      this.clusterId = UUID.randomUUID().toString();
+      this.path = GenericTestUtils.getTempPath(
+          MiniOzoneClusterImpl.class.getSimpleName() + "-" + clusterId);
+    }
+
+    /**
+     * Sets the cluster Id.
+     *
+     * @param id cluster Id
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setClusterId(String id) {
+      clusterId = id;
+      return this;
+    }
+
+    /**
+     * Sets the SCM id.
+     *
+     * @param id SCM Id
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setScmId(String id) {
+      scmId = Optional.of(id);
+      return this;
+    }
+
+    /**
+     * Sets the KSM id.
+     *
+     * @param id KSM Id
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setKsmId(String id) {
+      ksmId = Optional.of(id);
+      return this;
+    }
+
+    /**
+     * If set to true, the container service will be started on a random port.
+     *
+     * @param randomPort enable random port
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setRandomContainerPort(boolean randomPort) {
+      randomContainerPort = randomPort;
+      return this;
+    }
+
+    /**
+     * Sets the number of HddsDatanodes to be started as part of
+     * MiniOzoneCluster.
+     *
+     * @param val number of datanodes
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setNumDatanodes(int val) {
+      numOfDatanodes = val;
+      return this;
+    }
+
+
+    /**
+     * Sets the heartbeat interval of the datanodes; the value should be
+     * in milliseconds.
+     *
+     * @param val HeartBeat interval in milliseconds
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setHbInterval(int val) {
+      hbInterval = Optional.of(val);
+      return this;
+    }
+
+    /**
+     * Sets the heartbeat processor interval of the datanodes;
+     * the value should be in milliseconds.
+     *
+     * @param val HeartBeat Processor interval in milliseconds
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setHbProcessorInterval(int val) {
+      hbProcessorInterval = Optional.of(val);
+      return this;
+    }
+
+    /**
+     * When set to true, enables trace level logging.
+     *
+     * @param trace true or false
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder setTrace(Boolean trace) {
+      enableTrace = Optional.of(trace);
+      return this;
+    }
+
+    /**
+     * Modifies the configuration such that Ozone will be disabled.
+     *
+     * @return MiniOzoneCluster.Builder
+     */
+    public Builder disableOzone() {
+      ozoneEnabled = false;
+      return this;
+    }
+
+    /**
+     * Constructs and returns MiniOzoneCluster.
+     *
+     * @return {@link MiniOzoneCluster}
+     *
+     * @throws IOException
+     */
+    public abstract MiniOzoneCluster build() throws IOException;
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
new file mode 100644
index 0000000..891f67b
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
@@ -0,0 +1,425 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientFactory;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.ozone.ksm.KeySpaceManager;
+import org.apache.hadoop.hdds.scm.SCMStorage;
+import org.apache.hadoop.ozone.ksm.KSMStorage;
+import org.apache.hadoop.ozone.web.client.OzoneRestClient;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState
+    .HEALTHY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.HDDS_DATANODE_PLUGINS_KEY;
+
+/**
+ * MiniOzoneCluster creates a complete in-process Ozone cluster suitable for
+ * running tests.  The cluster consists of a KeySpaceManager,
+ * StorageContainerManager and multiple DataNodes.
+ */
+@InterfaceAudience.Private
+public final class MiniOzoneClusterImpl implements MiniOzoneCluster {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MiniOzoneClusterImpl.class);
+
+  private final OzoneConfiguration conf;
+  private final StorageContainerManager scm;
+  private final KeySpaceManager ksm;
+  private final List<HddsDatanodeService> hddsDatanodes;
+
+  /**
+   * Creates a new MiniOzoneCluster.
+   *
+   * @throws IOException if there is an I/O error
+   */
+  private MiniOzoneClusterImpl(OzoneConfiguration conf,
+                               KeySpaceManager ksm,
+                               StorageContainerManager scm,
+                               List<HddsDatanodeService> hddsDatanodes) {
+    this.conf = conf;
+    this.ksm = ksm;
+    this.scm = scm;
+    this.hddsDatanodes = hddsDatanodes;
+  }
+
+  public OzoneConfiguration getConf() {
+    return conf;
+  }
+
+  /**
+   * Waits for the Ozone cluster to be ready for processing requests.
+   */
+  @Override
+  public void waitForClusterToBeReady()
+      throws TimeoutException, InterruptedException {
+    GenericTestUtils.waitFor(() -> {
+      final int healthy = scm.getNodeCount(HEALTHY);
+      final boolean isReady = healthy == hddsDatanodes.size();
+      LOG.info("{}. Got {} of {} DN Heartbeats.",
+          isReady? "Cluster is ready" : "Waiting for cluster to be ready",
+          healthy, hddsDatanodes.size());
+      return isReady;
+    }, 1000, 60 * 1000); //wait for 1 min.
+  }
+
+  /**
+   * Waits for SCM to be out of chill mode. Many tests can only be run once
+   * SCM is out of chill mode.
+   *
+   * @throws TimeoutException
+   * @throws InterruptedException
+   */
+  @Override
+  public void waitTobeOutOfChillMode()
+      throws TimeoutException, InterruptedException {
+    GenericTestUtils.waitFor(() -> {
+      if (scm.getScmNodeManager().isOutOfChillMode()) {
+        return true;
+      }
+      LOG.info("Waiting for cluster to be ready. No datanodes found");
+      return false;
+    }, 100, 45000);
+  }
+
+  @Override
+  public StorageContainerManager getStorageContainerManager() {
+    return this.scm;
+  }
+
+  @Override
+  public KeySpaceManager getKeySpaceManager() {
+    return this.ksm;
+  }
+
+  @Override
+  public List<HddsDatanodeService> getHddsDatanodes() {
+    return hddsDatanodes;
+  }
+
+  @Override
+  public OzoneClient getClient() throws IOException {
+    return OzoneClientFactory.getClient(conf);
+  }
+
+  @Override
+  public OzoneClient getRpcClient() throws IOException {
+    return OzoneClientFactory.getRpcClient(conf);
+  }
+
+  /**
+   * Creates an {@link OzoneClient} connected to this cluster's REST
+   * service. Callers take ownership of the client and must close it when done.
+   *
+   * @return OzoneClient connected to this cluster's REST service
+   * @throws IOException if Ozone encounters an error creating the client
+   */
+  @Override
+  public OzoneClient getRestClient() throws IOException {
+    return OzoneClientFactory.getRestClient(conf);
+  }
+
+  /**
+   * Returns an RPC proxy connected to this cluster's StorageContainerManager
+   * for accessing container location information.  Callers take ownership of
+   * the proxy and must close it when done.
+   *
+   * @return RPC proxy for accessing container location information
+   * @throws IOException if there is an I/O error
+   */
+  @Override
+  public StorageContainerLocationProtocolClientSideTranslatorPB getStorageContainerLocationClient()
+      throws IOException {
+    long version = RPC.getProtocolVersion(
+        StorageContainerLocationProtocolPB.class);
+    InetSocketAddress address = scm.getClientRpcAddress();
+    LOG.info(
+        "Creating StorageContainerLocationProtocol RPC client with address {}",
+        address);
+    return new StorageContainerLocationProtocolClientSideTranslatorPB(
+        RPC.getProxy(StorageContainerLocationProtocolPB.class, version,
+            address, UserGroupInformation.getCurrentUser(), conf,
+            NetUtils.getDefaultSocketFactory(conf),
+            Client.getRpcTimeout(conf)));
+  }
+
+  @Override
+  public void restartStorageContainerManager() throws IOException {
+    scm.stop();
+    scm.start();
+  }
+
+  @Override
+  public void restartKeySpaceManager() throws IOException {
+    ksm.stop();
+    ksm.start();
+  }
+
+  @Override
+  public void restartHddsDatanode(int i) {
+    HddsDatanodeService datanodeService = hddsDatanodes.get(i);
+    datanodeService.stop();
+    datanodeService.join();
+    datanodeService.start(null);
+  }
+
+  @Override
+  public void shutdownHddsDatanode(int i) {
+    hddsDatanodes.get(i).stop();
+  }
+
+  @Override
+  public void shutdown() {
+    try {
+      LOG.info("Shutting down the Mini Ozone Cluster");
+
+      File baseDir = new File(GenericTestUtils.getTempPath(
+          MiniOzoneClusterImpl.class.getSimpleName() + "-" +
+              scm.getScmInfo().getClusterId()));
+      FileUtils.deleteDirectory(baseDir);
+
+      if (ksm != null) {
+        LOG.info("Shutting down the keySpaceManager");
+        ksm.stop();
+        ksm.join();
+      }
+
+      if (scm != null) {
+        LOG.info("Shutting down the StorageContainerManager");
+        scm.stop();
+        scm.join();
+      }
+
+      if (!hddsDatanodes.isEmpty()) {
+        LOG.info("Shutting down the HddsDatanodes");
+        for (HddsDatanodeService hddsDatanode : hddsDatanodes) {
+          hddsDatanode.stop();
+          hddsDatanode.join();
+        }
+      }
+    } catch (IOException e) {
+      LOG.error("Exception while shutting down the cluster.", e);
+    }
+  }
+
+  /**
+   * Builder for configuring the MiniOzoneCluster to run.
+   */
+  public static class Builder extends MiniOzoneCluster.Builder {
+
+    /**
+     * Creates a new Builder.
+     *
+     * @param conf configuration
+     */
+    public Builder(OzoneConfiguration conf) {
+      super(conf);
+    }
+
+    @Override
+    public MiniOzoneCluster build() throws IOException {
+      DefaultMetricsSystem.setMiniClusterMode(true);
+      initializeConfiguration();
+      StorageContainerManager scm = createSCM();
+      scm.start();
+      KeySpaceManager ksm = createKSM();
+      ksm.start();
+      List<HddsDatanodeService> hddsDatanodes = createHddsDatanodes(scm);
+      hddsDatanodes.forEach((datanode) -> datanode.start(null));
+      return new MiniOzoneClusterImpl(conf, ksm, scm, hddsDatanodes);
+    }
+
+    /**
+     * Initializes the configuration required for starting MiniOzoneCluster.
+     *
+     * @throws IOException
+     */
+    private void initializeConfiguration() throws IOException {
+      conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, ozoneEnabled);
+      Path metaDir = Paths.get(path, "ozone-meta");
+      Files.createDirectories(metaDir);
+      conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, metaDir.toString());
+      configureTrace();
+    }
+
+    /**
+     * Creates a new StorageContainerManager instance.
+     *
+     * @return {@link StorageContainerManager}
+     *
+     * @throws IOException
+     */
+    private StorageContainerManager createSCM() throws IOException {
+      configureSCM();
+      SCMStorage scmStore = new SCMStorage(conf);
+      scmStore.setClusterId(clusterId);
+      if (!scmId.isPresent()) {
+        scmId = Optional.of(UUID.randomUUID().toString());
+      }
+      scmStore.setScmId(scmId.get());
+      scmStore.initialize();
+      return StorageContainerManager.createSCM(null, conf);
+    }
+
+    /**
+     * Creates a new KeySpaceManager instance.
+     *
+     * @return {@link KeySpaceManager}
+     *
+     * @throws IOException
+     */
+    private KeySpaceManager createKSM() throws IOException {
+      configureKSM();
+      KSMStorage ksmStore = new KSMStorage(conf);
+      ksmStore.setClusterId(clusterId);
+      ksmStore.setScmId(scmId.get());
+      ksmStore.setKsmId(ksmId.orElse(UUID.randomUUID().toString()));
+      ksmStore.initialize();
+      return KeySpaceManager.createKSM(null, conf);
+    }
+
+    /**
+     * Creates the HddsDatanodeService instances.
+     *
+     * @return List of HddsDatanodeService
+     *
+     * @throws IOException
+     */
+    private List<HddsDatanodeService> createHddsDatanodes(
+        StorageContainerManager scm) throws IOException {
+      configureHddsDatanodes();
+      String scmAddress =  scm.getDatanodeRpcAddress().getHostString() +
+          ":" + scm.getDatanodeRpcAddress().getPort();
+      conf.setStrings(ScmConfigKeys.OZONE_SCM_NAMES, scmAddress);
+      List<HddsDatanodeService> hddsDatanodes = new ArrayList<>();
+      for (int i = 0; i < numOfDatanodes; i++) {
+        Configuration dnConf = new OzoneConfiguration(conf);
+        String datanodeBaseDir = path + "/datanode-" + Integer.toString(i);
+        Path metaDir = Paths.get(datanodeBaseDir, "meta");
+        Path dataDir = Paths.get(datanodeBaseDir, "data", "containers");
+        Path ratisDir = Paths.get(datanodeBaseDir, "data", "ratis");
+        Files.createDirectories(metaDir);
+        Files.createDirectories(dataDir);
+        Files.createDirectories(ratisDir);
+        dnConf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, metaDir.toString());
+        dnConf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dataDir.toString());
+        dnConf.set(OzoneConfigKeys.DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR,
+            ratisDir.toString());
+
+        hddsDatanodes.add(
+            HddsDatanodeService.createHddsDatanodeService(dnConf));
+      }
+      return hddsDatanodes;
+    }
+
+    private void configureSCM() {
+      conf.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "127.0.0.1:0");
+      conf.set(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY, "127.0.0.1:0");
+      conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "127.0.0.1:0");
+      conf.set(ScmConfigKeys.OZONE_SCM_HTTP_ADDRESS_KEY, "127.0.0.1:0");
+      conf.setInt(ScmConfigKeys.OZONE_SCM_HANDLER_COUNT_KEY, numOfScmHandlers);
+      configureSCMheartbeat();
+    }
+
+    private void configureSCMheartbeat() {
+      if (hbInterval.isPresent()) {
+        conf.setTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_INTERVAL,
+            hbInterval.get(), TimeUnit.MILLISECONDS);
+      } else {
+        conf.setTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_INTERVAL,
+            DEFAULT_HB_INTERVAL_MS,
+            TimeUnit.MILLISECONDS);
+      }
+
+      if (hbProcessorInterval.isPresent()) {
+        conf.setTimeDuration(
+            ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+            hbProcessorInterval.get(),
+            TimeUnit.MILLISECONDS);
+      } else {
+        conf.setTimeDuration(
+            ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+            DEFAULT_HB_PROCESSOR_INTERVAL_MS,
+            TimeUnit.MILLISECONDS);
+      }
+    }
+
+
+    private void configureKSM() {
+      conf.set(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY, "127.0.0.1:0");
+      conf.set(KSMConfigKeys.OZONE_KSM_HTTP_ADDRESS_KEY, "127.0.0.1:0");
+      conf.setInt(KSMConfigKeys.OZONE_KSM_HANDLER_COUNT_KEY, numOfKsmHandlers);
+    }
+
+    private void configureHddsDatanodes() {
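+      // When enabled, randomize the container and Ratis IPC ports so several
+      // datanodes can run inside the same JVM without port conflicts.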
+      conf.set(ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_KEY, "0.0.0.0:0");
+      conf.set(HDDS_DATANODE_PLUGINS_KEY,
+          "org.apache.hadoop.ozone.web.OzoneHddsDatanodeService");
+      conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT,
+          randomContainerPort);
+      conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT,
+          randomContainerPort);
+    }
+
+    private void configureTrace() {
+      if (enableTrace.isPresent()) {
+        conf.setBoolean(OzoneConfigKeys.OZONE_TRACE_ENABLED_KEY,
+            enableTrace.get());
+      }
+      // Raise the root log level to TRACE only when tracing was requested;
+      // otherwise keep the default INFO level.
+      GenericTestUtils.setRootLogLevel(
+          enableTrace.orElse(false) ? Level.TRACE : Level.INFO);
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/RatisTestHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/RatisTestHelper.java
new file mode 100644
index 0000000..9aefe9a
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/RatisTestHelper.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.ozone.web.client.OzoneRestClient;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.rpc.SupportedRpcType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Helpers for Ratis tests.
+ */
+public interface RatisTestHelper {
+  Logger LOG = LoggerFactory.getLogger(RatisTestHelper.class);
+
+  /** For testing Ozone with Ratis. */
+  class RatisTestSuite implements Closeable {
+    static final RpcType RPC = SupportedRpcType.NETTY;
+    static final int NUM_DATANODES = 3;
+
+    private final OzoneConfiguration conf;
+    private final MiniOzoneCluster cluster;
+
+    /**
+     * Create a {@link MiniOzoneCluster} for testing by setting
+     *   OZONE_ENABLED = true,
+     *   RATIS_ENABLED = true, and
+     *   OZONE_HANDLER_TYPE_KEY = "distributed".
+     */
+    public RatisTestSuite(final Class<?> clazz) throws IOException {
+      conf = newOzoneConfiguration(clazz, RPC);
+      cluster = newMiniOzoneCluster(NUM_DATANODES, conf);
+    }
+
+    public OzoneConfiguration getConf() {
+      return conf;
+    }
+
+    public MiniOzoneCluster getCluster() {
+      return cluster;
+    }
+
+    public OzoneRestClient newOzoneRestClient()
+        throws OzoneException, URISyntaxException {
+      return RatisTestHelper.newOzoneRestClient(getDatanodeOzoneRestPort());
+    }
+
+    @Override
+    public void close() {
+      cluster.shutdown();
+    }
+
+    public int getDatanodeOzoneRestPort() {
+      return cluster.getHddsDatanodes().get(0).getDatanodeDetails()
+          .getOzoneRestPort();
+    }
+  }
+
+  static OzoneConfiguration newOzoneConfiguration(
+      Class<?> clazz, RpcType rpc) {
+    final OzoneConfiguration conf = new OzoneConfiguration();
+    ContainerTestHelper.setOzoneLocalStorageRoot(clazz, conf);
+    initRatisConf(rpc, conf);
+    return conf;
+  }
+
+  static void initRatisConf(RpcType rpc, Configuration conf) {
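+    // Enable Ratis-based replication for container IO and select the Ratis
+    // RPC transport to use for these tests.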
+    conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY, true);
+    conf.set(OzoneConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY, rpc.name());
+    LOG.info(OzoneConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY
+        + " = " + rpc.name());
+  }
+
+  static MiniOzoneCluster newMiniOzoneCluster(
+      int numDatanodes, OzoneConfiguration conf) throws IOException {
+    final MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(numDatanodes).build();
+    return cluster;
+  }
+
+  static OzoneRestClient newOzoneRestClient(int port)
+      throws OzoneException, URISyntaxException {
+    return new OzoneRestClient("http://localhost:" + port);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java
new file mode 100644
index 0000000..20579fd
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestContainerOperations.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * This class tests container operations (TODO: currently only create is
+ * supported) issued from CBlock clients.
+ */
+public class TestContainerOperations {
+
+  private static ScmClient storageClient;
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration ozoneConf;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    int containerSizeGB = 5;
+    ContainerOperationClient.setContainerSizeB(
+        containerSizeGB * OzoneConsts.GB);
+    ozoneConf = new OzoneConfiguration();
+    ozoneConf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+        SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
+    cluster = MiniOzoneCluster.newBuilder(ozoneConf).setNumDatanodes(1).build();
+    StorageContainerLocationProtocolClientSideTranslatorPB client =
+        cluster.getStorageContainerLocationClient();
+    RPC.setProtocolEngine(ozoneConf, StorageContainerLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    storageClient = new ContainerOperationClient(
+        client, new XceiverClientManager(ozoneConf));
+    cluster.waitForClusterToBeReady();
+  }
+
+  @AfterClass
+  public static void cleanup() throws Exception {
+    if(cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * A simple test to create a container with {@link ContainerOperationClient}.
+   * @throws Exception
+   */
+  @Test
+  public void testCreate() throws Exception {
+    Pipeline pipeline0 = storageClient.createContainer(HddsProtos
+        .ReplicationType.STAND_ALONE, HddsProtos.ReplicationFactor
+        .ONE, "container0", "OZONE");
+    assertEquals("container0", pipeline0.getContainerName());
+
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java
new file mode 100644
index 0000000..6755e34
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.SCMTestUtils;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.ozoneimpl.TestOzoneContainer;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.test.PathUtils;
+import org.apache.hadoop.test.TestGenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import static org.junit.Assert.*;
+
+/**
+ * Test cases for mini ozone cluster.
+ */
+public class TestMiniOzoneCluster {
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+
+  private final static File TEST_ROOT = TestGenericTestUtils.getTestDir();
+  private final static File WRITE_TMP = new File(TEST_ROOT, "write");
+  private final static File READ_TMP = new File(TEST_ROOT, "read");
+
+  @BeforeClass
+  public static void setup() {
+    conf = new OzoneConfiguration();
+    conf.set(OZONE_METADATA_DIRS,
+        TEST_ROOT.toString());
+    conf.setBoolean(DFS_CONTAINER_RATIS_IPC_RANDOM_PORT, true);
+    WRITE_TMP.mkdirs();
+    READ_TMP.mkdirs();
+    WRITE_TMP.deleteOnExit();
+    READ_TMP.deleteOnExit();
+  }
+
+  @AfterClass
+  public static void cleanup() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test(timeout = 30000)
+  public void testStartMultipleDatanodes() throws Exception {
+    final int numberOfNodes = 3;
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(numberOfNodes)
+        .build();
+    cluster.waitForClusterToBeReady();
+    List<HddsDatanodeService> datanodes = cluster.getHddsDatanodes();
+    assertEquals(numberOfNodes, datanodes.size());
+    for(HddsDatanodeService dn : datanodes) {
+      // Create a single-member pipeline
+      String containerName = OzoneUtils.getRequestID();
+      DatanodeDetails datanodeDetails = dn.getDatanodeDetails();
+      final PipelineChannel pipelineChannel =
+          new PipelineChannel(datanodeDetails.getUuidString(),
+              HddsProtos.LifeCycleState.OPEN,
+              HddsProtos.ReplicationType.STAND_ALONE,
+              HddsProtos.ReplicationFactor.ONE, "test");
+      pipelineChannel.addMember(datanodeDetails);
+      Pipeline pipeline = new Pipeline(containerName, pipelineChannel);
+
+      // Verify client is able to connect to the container
+      try (XceiverClient client = new XceiverClient(pipeline, conf)){
+        client.connect();
+        assertTrue(client.isConnected());
+      }
+    }
+  }
+
+  @Test
+  public void testDatanodeIDPersistent() throws Exception {
+    // Generate IDs for testing
+    DatanodeDetails id1 = TestUtils.getDatanodeDetails();
+    DatanodeDetails id2 = TestUtils.getDatanodeDetails();
+    DatanodeDetails id3 = TestUtils.getDatanodeDetails();
+    id1.setContainerPort(1);
+    id2.setContainerPort(2);
+    id3.setContainerPort(3);
+
+    // Write a single ID to the file and read it out
+    File validIdsFile = new File(WRITE_TMP, "valid-values.id");
+    validIdsFile.delete();
+    ContainerUtils.writeDatanodeDetailsTo(id1, validIdsFile);
+    DatanodeDetails validId = ContainerUtils.readDatanodeDetailsFrom(
+        validIdsFile);
+
+    assertEquals(id1, validId);
+    assertEquals(id1.getProtoBufMessage(), validId.getProtoBufMessage());
+
+    // Read should fail if the file doesn't exist
+    File nonExistFile = new File(READ_TMP, "non_exist.id");
+    nonExistFile.delete();
+    try {
+      ContainerUtils.readDatanodeDetailsFrom(nonExistFile);
+      Assert.fail();
+    } catch (Exception e) {
+      assertTrue(e instanceof IOException);
+    }
+
+    // Read should fail if the file is malformed
+    File malformedFile = new File(READ_TMP, "malformed.id");
+    createMalformedIDFile(malformedFile);
+    try {
+      ContainerUtils.readDatanodeDetailsFrom(malformedFile);
+      fail("Read a malformed ID file should fail");
+    } catch (Exception e) {
+      assertTrue(e instanceof IOException);
+    }
+  }
+
+  @Test
+  public void testContainerRandomPort() throws IOException {
+    Configuration ozoneConf = SCMTestUtils.getConf();
+    File testDir = PathUtils.getTestDir(TestOzoneContainer.class);
+    ozoneConf.set(DFS_DATANODE_DATA_DIR_KEY, testDir.getAbsolutePath());
+    ozoneConf.set(OZONE_METADATA_DIRS,
+        TEST_ROOT.toString());
+
+    // Each instance of the datanode state machine will create an ozone
+    // container that binds to a random port.
+    ozoneConf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT, true);
+    ozoneConf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT,
+        true);
+    try (
+        DatanodeStateMachine sm1 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf);
+        DatanodeStateMachine sm2 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf);
+        DatanodeStateMachine sm3 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf)
+    ) {
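+      // Set#add returns false for duplicates, so adding every server port
+      // verifies that the three state machines picked distinct ports.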
+      HashSet<Integer> ports = new HashSet<>();
+      assertTrue(ports.add(sm1.getContainer().getContainerServerPort()));
+      assertTrue(ports.add(sm2.getContainer().getContainerServerPort()));
+      assertTrue(ports.add(sm3.getContainer().getContainerServerPort()));
+
+      // Assert that ratis is also on a different port.
+      assertTrue(ports.add(sm1.getContainer().getRatisContainerServerPort()));
+      assertTrue(ports.add(sm2.getContainer().getRatisContainerServerPort()));
+      assertTrue(ports.add(sm3.getContainer().getRatisContainerServerPort()));
+    }
+
+    // Turn off the random port flag and test again
+    ozoneConf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT, false);
+    try (
+        DatanodeStateMachine sm1 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf);
+        DatanodeStateMachine sm2 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf);
+        DatanodeStateMachine sm3 = new DatanodeStateMachine(
+            TestUtils.getDatanodeDetails(), ozoneConf)
+    ) {
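+      // With the random-port flag off, all three state machines fall back to
+      // the same default container port, so only the first add succeeds.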
+      HashSet<Integer> ports = new HashSet<>();
+      assertTrue(ports.add(sm1.getContainer().getContainerServerPort()));
+      assertFalse(ports.add(sm2.getContainer().getContainerServerPort()));
+      assertFalse(ports.add(sm3.getContainer().getContainerServerPort()));
+      assertEquals(ports.iterator().next().intValue(),
+          conf.getInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+              OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT));
+    }
+  }
+
+  private void createMalformedIDFile(File malformedFile)
+      throws IOException{
+    malformedFile.delete();
+    DatanodeDetails id = TestUtils.getDatanodeDetails();
+    ContainerUtils.writeDatanodeDetailsTo(id, malformedFile);
+
+    FileOutputStream out = new FileOutputStream(malformedFile);
+    out.write("malformed".getBytes());
+    out.close();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
new file mode 100644
index 0000000..533a3b4
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.conf.TestConfigurationFieldsBase;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+
+/**
+ * Tests whether the configuration constants are documented in ozone-default.xml.
+ */
+public class TestOzoneConfigurationFields extends TestConfigurationFieldsBase {
+
+  @Override
+  public void initializeMemberVariables() {
+    xmlFilename = "ozone-default.xml";
+    configurationClasses =
+        new Class[] {OzoneConfigKeys.class, ScmConfigKeys.class,
+            KSMConfigKeys.class};
+    errorIfMissingConfigProps = true;
+    errorIfMissingXmlProps = true;
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneHelper.java
new file mode 100644
index 0000000..8a3d1a8
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneHelper.java
@@ -0,0 +1,413 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+
+import javax.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Locale;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Helper functions to test Ozone.
+ */
+public class TestOzoneHelper {
+
+  public CloseableHttpClient createHttpClient() {
+    return HttpClients.createDefault();
+  }
+  /**
+   * Creates Volumes on Ozone Store.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumes(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    try {
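+      // Ozone REST requests carry the protocol version, Date, simple-auth
+      // Authorization and OZONE_USER headers.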
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * Create Volumes with Quota.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumesWithQuota(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    try {
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s?quota=10TB", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * Create Volumes with Invalid Quota.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumesWithInvalidQuota(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    try {
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s?quota=NaN", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), ErrorTable.MALFORMED_QUOTA
+              .getHttpCode(),
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * To create a volume, a user name must be specified via the OZONE_USER
+   * header. This test verifies that the request fails when no OZONE user
+   * name is provided.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumesWithInvalidUser(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    try {
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s?quota=1TB", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+
+      assertEquals(response.toString(), ErrorTable.USER_NOT_FOUND.getHttpCode(),
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * Only admins can create volumes in Ozone. This test uses the simple
+   * authentication backend, in which hdfs and root are the admin users.
+   * <p>
+   * It tries to create a volume as the non-admin user bilbo.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumesWithOutAdminRights(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    try {
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s?quota=NaN", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              "bilbo"); // This is not a root user in Simple Auth
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), ErrorTable.ACCESS_DENIED.getHttpCode(),
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * Create a bunch of volumes in a loop.
+   *
+   * @throws IOException
+   */
+  public void testCreateVolumesInLoop(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+
+    for (int x = 0; x < 1000; x++) {
+      CloseableHttpClient client = createHttpClient();
+      String volumeName = OzoneUtils.getRequestID().toLowerCase();
+      String userName = OzoneUtils.getRequestID().toLowerCase();
+
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s?quota=10TB", port, volumeName));
+
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, userName);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+      client.close();
+    }
+  }
+  /**
+   * Get volumes owned by the user.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesByUser(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    // We need to create a volume for this test to succeed.
+    testCreateVolumes(port);
+    CloseableHttpClient client = createHttpClient();
+    try {
+      HttpGet httpget =
+          new HttpGet(String.format("http://localhost:%d/", port));
+
+      httpget.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+
+      httpget.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+
+      httpget.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      httpget.addHeader(Header.OZONE_USER,
+          OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httpget);
+      assertEquals(response.toString(), HTTP_OK,
+          response.getStatusLine().getStatusCode());
+
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * Admins can read volumes belonging to other users.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesOfAnotherUser(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+
+    CloseableHttpClient client = createHttpClient();
+    try {
+      HttpGet httpget =
+          new HttpGet(String.format("http://localhost:%d/", port));
+
+      httpget.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httpget.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+
+      httpget.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_ROOT_USER);
+
+      // User Root is getting volumes belonging to user HDFS
+      httpget.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httpget);
+      assertEquals(response.toString(), HTTP_OK,
+          response.getStatusLine().getStatusCode());
+
+    } finally {
+      client.close();
+    }
+  }
+
+  /**
+   * A non-admin user who tries to read volumes belonging to another user
+   * should be rejected by the server.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesOfAnotherUserShouldFail(int port)
+      throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+
+    CloseableHttpClient client = createHttpClient();
+    String userName = OzoneUtils.getRequestID().toLowerCase();
+    try {
+      HttpGet httpget =
+          new HttpGet(String.format("http://localhost:%d/", port));
+
+      httpget.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httpget.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+
+      httpget.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              userName);
+
+      // userName is NOT an admin user, so it should NOT be able to read the
+      // volumes of user HDFS.
+      httpget.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httpget);
+      // We will get an Error called userNotFound when using Simple Auth Scheme
+      assertEquals(response.toString(), ErrorTable.USER_NOT_FOUND.getHttpCode(),
+          response.getStatusLine().getStatusCode());
+
+    } finally {
+      client.close();
+    }
+  }
+
+  public void testListKeyOnEmptyBucket(int port) throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = createHttpClient();
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    String bucketName = OzoneUtils.getRequestID().toLowerCase() + "bucket";
+    try {
+
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s", port, volumeName));
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " "
+              + OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+      client.close();
+
+      client = createHttpClient();
+      httppost = new HttpPost(String
+          .format("http://localhost:%d/%s/%s", port, volumeName, bucketName));
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " "
+              + OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+      client.close();
+
+      client = createHttpClient();
+      HttpGet httpget = new HttpGet(String
+          .format("http://localhost:%d/%s/%s", port, volumeName, bucketName));
+      httpget.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httpget.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.monotonicNow())));
+      httpget.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " "
+              + OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httpget.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      response = client.execute(httpget);
+      assertEquals(response.toString() + " " + response.getStatusLine()
+              .getReasonPhrase(), HTTP_OK,
+          response.getStatusLine().getStatusCode());
+
+    } finally {
+      client.close();
+    }
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
new file mode 100644
index 0000000..fa307c9
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
@@ -0,0 +1,443 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import static org.junit.Assert.fail;
+import java.io.IOException;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.SCMStorage;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
+import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.hdds.scm.StorageContainerManager.StartupOption;
+import org.apache.hadoop.hdds.scm.block.DeletedBlockLog;
+import org.apache.hadoop.hdds.scm.block.SCMBlockDeletingService;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.junit.Rule;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.Map;
+import java.util.Collections;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+
+import org.junit.rules.Timeout;
+import org.mockito.Mockito;
+import org.apache.hadoop.test.GenericTestUtils;
+
+/**
+ * Test class that exercises the StorageContainerManager.
+ */
+public class TestStorageContainerManager {
+  private static XceiverClientManager xceiverClientManager =
+      new XceiverClientManager(
+      new OzoneConfiguration());
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void testRpcPermission() throws Exception {
+    // Test with default configuration
+    OzoneConfiguration defaultConf = new OzoneConfiguration();
+    testRpcPermissionWithConf(defaultConf, "unknownUser", true);
+
+    // Test with ozone.administrators defined in configuration
+    OzoneConfiguration ozoneConf = new OzoneConfiguration();
+    ozoneConf.setStrings(OzoneConfigKeys.OZONE_ADMINISTRATORS,
+        "adminUser1, adminUser2");
+    // Non-admin user will get permission denied.
+    testRpcPermissionWithConf(ozoneConf, "unknownUser", true);
+    // Admin user will pass the permission check.
+    testRpcPermissionWithConf(ozoneConf, "adminUser2", false);
+  }
+
+  private void testRpcPermissionWithConf(
+      OzoneConfiguration ozoneConf, String fakeRemoteUsername,
+      boolean expectPermissionDenied) throws Exception {
+    MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(ozoneConf).build();
+    cluster.waitForClusterToBeReady();
+    try {
+      String fakeUser = fakeRemoteUsername;
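+      // Spy on the real SCM and stub the remote-user lookup so the ACL
+      // checks see the fake caller instead of the test's own user.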
+      StorageContainerManager mockScm = Mockito.spy(
+          cluster.getStorageContainerManager());
+      Mockito.when(mockScm.getPpcRemoteUsername())
+          .thenReturn(fakeUser);
+
+      try {
+        mockScm.deleteContainer("container1");
+        fail("Operation should fail, expecting an IOException here.");
+      } catch (Exception e) {
+        if (expectPermissionDenied) {
+          verifyPermissionDeniedException(e, fakeUser);
+        } else {
+          // If passes permission check, it should fail with
+          // container not exist exception.
+          Assert.assertTrue(e.getMessage()
+              .contains("container doesn't exist"));
+        }
+      }
+
+      try {
+        Pipeline pipeLine2 = mockScm.allocateContainer(
+            xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, "container2", "OZONE");
+        if (expectPermissionDenied) {
+          fail("Operation should fail, expecting an IOException here.");
+        } else {
+          Assert.assertEquals("container2", pipeLine2.getContainerName());
+        }
+      } catch (Exception e) {
+        verifyPermissionDeniedException(e, fakeUser);
+      }
+
+      try {
+        Pipeline pipeLine3 = mockScm.allocateContainer(
+            xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, "container3", "OZONE");
+
+        if (expectPermissionDenied) {
+          fail("Operation should fail, expecting an IOException here.");
+        } else {
+          Assert.assertEquals("container3", pipeLine3.getContainerName());
+          Assert.assertEquals(1, pipeLine3.getMachines().size());
+        }
+      } catch (Exception e) {
+        verifyPermissionDeniedException(e, fakeUser);
+      }
+
+      try {
+        mockScm.getContainer("container4");
+        fail("Operation should fail, expecting an IOException here.");
+      } catch (Exception e) {
+        if (expectPermissionDenied) {
+          verifyPermissionDeniedException(e, fakeUser);
+        } else {
+          // If passes permission check, it should fail with
+          // key not exist exception.
+          Assert.assertTrue(e.getMessage()
+              .contains("Specified key does not exist"));
+        }
+      }
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  private void verifyPermissionDeniedException(Exception e, String userName) {
+    String expectedErrorMessage = "Access denied for user "
+        + userName + ". " + "Superuser privilege is required.";
+    Assert.assertTrue(e instanceof IOException);
+    Assert.assertEquals(expectedErrorMessage, e.getMessage());
+  }
+
+  @Test
+  public void testBlockDeletionTransactions() throws Exception {
+    int numKeys = 5;
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.setTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_INTERVAL,
+        5,
+        TimeUnit.SECONDS);
+    conf.setTimeDuration(ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+        3000,
+        TimeUnit.MILLISECONDS);
+    conf.setInt(ScmConfigKeys.OZONE_SCM_BLOCK_DELETION_MAX_RETRY, 5);
+    conf.setTimeDuration(OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+        1, TimeUnit.SECONDS);
+    // Reset container provision size, otherwise only one container
+    // is created by default.
+    conf.setInt(ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE,
+        numKeys);
+
+    MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+
+    try {
+      DeletedBlockLog delLog = cluster.getStorageContainerManager()
+          .getScmBlockManager().getDeletedBlockLog();
+      Assert.assertEquals(0, delLog.getNumOfValidTransactions());
+
+      // Create {numKeys} keys with random names.
+      TestStorageContainerManagerHelper helper =
+          new TestStorageContainerManagerHelper(cluster, conf);
+      Map<String, KsmKeyInfo> keyLocations = helper.createKeys(numKeys, 4096);
+
+      Map<String, List<String>> containerBlocks = createDeleteTXLog(delLog,
+          keyLocations, helper);
+      Set<String> containerNames = containerBlocks.keySet();
+
+      // Verify a few TX gets created in the TX log.
+      Assert.assertTrue(delLog.getNumOfValidTransactions() > 0);
+
+      // Once TXs are written into the log, SCM starts to fetch TX entries
+      // from the log and schedules block deletions on each heartbeat
+      // interval. After some time all the TXs should have been processed,
+      // and by then the containerBlocks of all known containers will be
+      // empty again.
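+      // waitFor polls the condition every second and fails the test if it
+      // does not become true within 10 seconds.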
+      GenericTestUtils.waitFor(() -> {
+        try {
+          return delLog.getNumOfValidTransactions() == 0;
+        } catch (IOException e) {
+          return false;
+        }
+      }, 1000, 10000);
+      Assert.assertTrue(helper.getAllBlocks(containerNames).isEmpty());
+
+      // Continue the test: add some TXs with known container names
+      // but unknown block IDs.
+      for (String containerName : containerBlocks.keySet()) {
+        // Add 2 TXs per container.
+        delLog.addTransaction(containerName,
+            Collections.singletonList(RandomStringUtils.randomAlphabetic(5)));
+        delLog.addTransaction(containerName,
+            Collections.singletonList(RandomStringUtils.randomAlphabetic(5)));
+      }
+
+      // Verify a few TX gets created in the TX log.
+      Assert.assertTrue(delLog.getNumOfValidTransactions() > 0);
+
+      // These blocks cannot be found in the containers, so their deletion is
+      // skipped; eventually these TXs will succeed.
+      GenericTestUtils.waitFor(() -> {
+        try {
+          return delLog.getFailedTransactions().size() == 0;
+        } catch (IOException e) {
+          return false;
+        }
+      }, 1000, 10000);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  @Test
+  public void testBlockDeletingThrottling() throws Exception {
+    int numKeys = 15;
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.setInt(ScmConfigKeys.OZONE_SCM_BLOCK_DELETION_MAX_RETRY, 5);
+    conf.setTimeDuration(OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+        1000, TimeUnit.MILLISECONDS);
+    conf.setInt(ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE,
+        numKeys);
+
+    MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
+        .setHbInterval(5000)
+        .setHbProcessorInterval(3000)
+        .build();
+    cluster.waitForClusterToBeReady();
+
+    DeletedBlockLog delLog = cluster.getStorageContainerManager()
+        .getScmBlockManager().getDeletedBlockLog();
+    Assert.assertEquals(0, delLog.getNumOfValidTransactions());
+
+    int limitSize = 1;
+    // Reset the limit to 1 so that only one TX is handled per datanode.
+    SCMBlockDeletingService delService = cluster.getStorageContainerManager()
+        .getScmBlockManager().getSCMBlockDeletingService();
+    delService.setBlockDeleteTXNum(limitSize);
+
+    // Create {numKeys} keys with random names.
+    TestStorageContainerManagerHelper helper =
+        new TestStorageContainerManagerHelper(cluster, conf);
+    Map<String, KsmKeyInfo> keyLocations = helper.createKeys(numKeys, 4096);
+
+    createDeleteTXLog(delLog, keyLocations, helper);
+    // Verify a few TX gets created in the TX log.
+    Assert.assertTrue(delLog.getNumOfValidTransactions() > 0);
+
+    // Verify that each delete command contains the expected number of TXs.
+    GenericTestUtils.waitFor(() -> {
+      NodeManager nodeManager = cluster.getStorageContainerManager()
+          .getScmNodeManager();
+      ReportState reportState = ReportState.newBuilder()
+          .setState(ReportState.states.noContainerReports).setCount(0).build();
+      List<SCMCommand> commands = nodeManager.sendHeartbeat(
+          nodeManager.getNodes(NodeState.HEALTHY).get(0).getProtoBufMessage(),
+          null, reportState);
+
+      if (commands != null) {
+        for (SCMCommand cmd : commands) {
+          if (cmd.getType() == SCMCmdType.deleteBlocksCommand) {
+            List<DeletedBlocksTransaction> deletedTXs =
+                ((DeleteBlocksCommand) cmd).blocksTobeDeleted();
+            return deletedTXs != null && deletedTXs.size() == limitSize;
+          }
+        }
+      }
+      return false;
+    }, 500, 10000);
+  }
+
+  private Map<String, List<String>> createDeleteTXLog(DeletedBlockLog delLog,
+      Map<String, KsmKeyInfo> keyLocations,
+      TestStorageContainerManagerHelper helper) throws IOException {
+    // These keys will be written into a number of containers; collect the
+    // set of container names and verify their blocks on the datanodes.
+    Set<String> containerNames = new HashSet<>();
+    for (Map.Entry<String, KsmKeyInfo> entry : keyLocations.entrySet()) {
+      entry.getValue().getLatestVersionLocations().getLocationList()
+          .forEach(loc -> containerNames.add(loc.getContainerName()));
+    }
+
+    // The total number of blocks in these containers should equal the number
+    // of blocks created via the key-creation calls.
+    int totalCreatedBlocks = 0;
+    for (KsmKeyInfo info : keyLocations.values()) {
+      totalCreatedBlocks += info.getKeyLocationVersions().size();
+    }
+    Assert.assertTrue(totalCreatedBlocks > 0);
+    Assert.assertEquals(totalCreatedBlocks,
+        helper.getAllBlocks(containerNames).size());
+
+    // Create a deletion TX for each key.
+    Map<String, List<String>> containerBlocks = Maps.newHashMap();
+    for (KsmKeyInfo info : keyLocations.values()) {
+      List<KsmKeyLocationInfo> list =
+          info.getLatestVersionLocations().getLocationList();
+      list.forEach(location -> {
+        if (containerBlocks.containsKey(location.getContainerName())) {
+          containerBlocks.get(location.getContainerName())
+              .add(location.getBlockID());
+        } else {
+          List<String> blks = Lists.newArrayList();
+          blks.add(location.getBlockID());
+          containerBlocks.put(location.getContainerName(), blks);
+        }
+      });
+    }
+    for (Map.Entry<String, List<String>> tx : containerBlocks.entrySet()) {
+      delLog.addTransaction(tx.getKey(), tx.getValue());
+    }
+
+    return containerBlocks;
+  }
+
+  @Test
+  public void testSCMInitialization() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final String path = GenericTestUtils.getTempPath(
+        UUID.randomUUID().toString());
+    Path scmPath = Paths.get(path, "scm-meta");
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+
+    StartupOption.INIT.setClusterId("testClusterId");
+    // This will initialize SCM
+    StorageContainerManager.scmInit(conf);
+
+    SCMStorage scmStore = new SCMStorage(conf);
+    Assert.assertEquals(NodeType.SCM, scmStore.getNodeType());
+    Assert.assertEquals("testClusterId", scmStore.getClusterID());
+    StartupOption.INIT.setClusterId("testClusterIdNew");
+    StorageContainerManager.scmInit(conf);
+    Assert.assertEquals(NodeType.SCM, scmStore.getNodeType());
+    Assert.assertEquals("testClusterId", scmStore.getClusterID());
+
+  }
+
+  @Test
+  public void testSCMReinitialization() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final String path = GenericTestUtils.getTempPath(
+        UUID.randomUUID().toString());
+    Path scmPath = Paths.get(path, "scm-meta");
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+    // This will set the cluster id in the version file.
+    MiniOzoneCluster cluster =
+        MiniOzoneCluster.newBuilder(conf).setNumDatanodes(1).build();
+    cluster.waitForClusterToBeReady();
+    StartupOption.INIT.setClusterId("testClusterId");
+    // This will initialize SCM
+    StorageContainerManager.scmInit(conf);
+    SCMStorage scmStore = new SCMStorage(conf);
+    Assert.assertEquals(NodeType.SCM, scmStore.getNodeType());
+    Assert.assertNotEquals("testClusterId", scmStore.getClusterID());
+    cluster.shutdown();
+  }
+
+  @Test
+  public void testSCMInitializationFailure() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final String path =
+        GenericTestUtils.getTempPath(UUID.randomUUID().toString());
+    Path scmPath = Paths.get(path, "scm-meta");
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+    conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
+    exception.expect(SCMException.class);
+    exception.expectMessage("SCM not initialized.");
+    StorageContainerManager.createSCM(null, conf);
+  }
+
+  @Test
+  public void testScmInfo() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final String path =
+        GenericTestUtils.getTempPath(UUID.randomUUID().toString());
+    Path scmPath = Paths.get(path, "scm-meta");
+    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString());
+    conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
+    SCMStorage scmStore = new SCMStorage(conf);
+    String clusterId = UUID.randomUUID().toString();
+    String scmId = UUID.randomUUID().toString();
+    scmStore.setClusterId(clusterId);
+    scmStore.setScmId(scmId);
+    // writes the version file properties
+    scmStore.initialize();
+    StorageContainerManager scm = StorageContainerManager.createSCM(null, conf);
+    //Reads the SCM Info from SCM instance
+    ScmInfo scmInfo = scm.getScmInfo();
+    Assert.assertEquals(clusterId, scmInfo.getClusterId());
+    Assert.assertEquals(scmId, scmInfo.getScmId());
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
new file mode 100644
index 0000000..7005ea0
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Map;
+import java.util.LinkedList;
+import java.util.Set;
+
+/**
+ * A helper class used by {@link TestStorageContainerManager} to generate
+ * keys and to verify container and block locations.
+ */
+public class TestStorageContainerManagerHelper {
+
+  private final MiniOzoneCluster cluster;
+  private final Configuration conf;
+  private final StorageHandler storageHandler;
+
+  public TestStorageContainerManagerHelper(MiniOzoneCluster cluster,
+      Configuration conf) throws IOException {
+    this.cluster = cluster;
+    this.conf = conf;
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+  }
+
+  public Map<String, KsmKeyInfo> createKeys(int numOfKeys, int keySize)
+      throws Exception {
+    Map<String, KsmKeyInfo> keyLocationMap = Maps.newHashMap();
+    String volume = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucket = "bucket" + RandomStringUtils.randomNumeric(5);
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    UserArgs userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volume, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucket, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    // Write the requested number of keys into the bucket.
+    Set<String> keyNames = Sets.newHashSet();
+    KeyArgs keyArgs;
+    for (int i = 0; i < numOfKeys; i++) {
+      String keyName = RandomStringUtils.randomAlphabetic(5) + i;
+      keyNames.add(keyName);
+      keyArgs = new KeyArgs(keyName, bucketArgs);
+      keyArgs.setSize(keySize);
+      // The tests only need key metadata and block locations, so a small
+      // random payload is enough.
+      OutputStream stream = storageHandler.newKeyWriter(keyArgs);
+      stream.write(DFSUtil.string2Bytes(
+          RandomStringUtils.randomAlphabetic(5)));
+      stream.close();
+    }
+
+    for (String key : keyNames) {
+      KsmKeyArgs arg = new KsmKeyArgs.Builder()
+          .setVolumeName(volume)
+          .setBucketName(bucket)
+          .setKeyName(key)
+          .build();
+      KsmKeyInfo location = cluster.getKeySpaceManager()
+          .lookupKey(arg);
+      keyLocationMap.put(key, location);
+    }
+    return keyLocationMap;
+  }
+
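+  /**
+   * Returns the keys in the given container that still carry the deleting
+   * prefix, i.e. blocks pending deletion, with the prefix stripped.
+   */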
+  public List<String> getPendingDeletionBlocks(String containerName)
+      throws IOException {
+    List<String> pendingDeletionBlocks = Lists.newArrayList();
+    MetadataStore meta = getContainerMetadata(containerName);
+    KeyPrefixFilter filter =
+        new KeyPrefixFilter(OzoneConsts.DELETING_KEY_PREFIX);
+    List<Map.Entry<byte[], byte[]>> kvs = meta
+        .getRangeKVs(null, Integer.MAX_VALUE, filter);
+    kvs.forEach(entry -> {
+      String key = DFSUtil.bytes2String(entry.getKey());
+      pendingDeletionBlocks
+          .add(key.replace(OzoneConsts.DELETING_KEY_PREFIX, ""));
+    });
+    return pendingDeletionBlocks;
+  }
+
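+  /**
+   * Returns the block keys stored in all of the given containers, excluding
+   * entries that are marked for deletion.
+   */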
+  public List<String> getAllBlocks(Set<String> containerNames)
+      throws IOException {
+    List<String> allBlocks = Lists.newArrayList();
+    for (String containerName : containerNames) {
+      allBlocks.addAll(getAllBlocks(containerName));
+    }
+    return allBlocks;
+  }
+
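+  /**
+   * Returns the block keys stored in the given container, excluding entries
+   * that are marked for deletion.
+   */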
+  public List<String> getAllBlocks(String containerName) throws IOException {
+    List<String> allBlocks = Lists.newArrayList();
+    MetadataStore meta = getContainerMetadata(containerName);
+    MetadataKeyFilter filter =
+        (preKey, currentKey, nextKey) -> !DFSUtil.bytes2String(currentKey)
+            .startsWith(OzoneConsts.DELETING_KEY_PREFIX);
+    List<Map.Entry<byte[], byte[]>> kvs =
+        meta.getRangeKVs(null, Integer.MAX_VALUE, filter);
+    kvs.forEach(entry -> {
+      String key = DFSUtil.bytes2String(entry.getKey());
+      allBlocks.add(key.replace(OzoneConsts.DELETING_KEY_PREFIX, ""));
+    });
+    return allBlocks;
+  }
+
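+  /**
+   * Opens the metadata store of the given container on the datanode that
+   * leads its pipeline.
+   */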
+  private MetadataStore getContainerMetadata(String containerName)
+      throws IOException {
+    Pipeline pipeline = cluster.getStorageContainerManager()
+        .getContainer(containerName);
+    DatanodeDetails leadDN = pipeline.getLeader();
+    OzoneContainer containerServer =
+        getContainerServerByDatanodeUuid(leadDN.getUuidString());
+    ContainerData containerData = containerServer.getContainerManager()
+        .readContainer(containerName);
+    return KeyUtils.getDB(containerData, conf);
+  }
+
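+  /**
+   * Finds the OzoneContainer server running on the datanode with the given
+   * UUID, or throws an IOException if no such datanode is in the cluster.
+   */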
+  private OzoneContainer getContainerServerByDatanodeUuid(String dnUUID)
+      throws IOException {
+    for (HddsDatanodeService dn : cluster.getHddsDatanodes()) {
+      if (dn.getDatanodeDetails().getUuidString().equals(dnUUID)) {
+        return dn.getDatanodeStateMachine().getContainer();
+      }
+    }
+    throw new IOException("Unable to get the ozone container "
+        + "for given datanode ID " + dnUUID);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
new file mode 100644
index 0000000..a94ee6c
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
@@ -0,0 +1,404 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.BucketArgs;
+import org.apache.hadoop.ozone.client.ObjectStore;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientFactory;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.VolumeArgs;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Tests all the public-facing APIs of the Ozone REST client.
+ */
+public class TestOzoneRestClient {
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneClient ozClient = null;
+  private static ObjectStore store = null;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws Exception
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(1).build();
+    cluster.waitForClusterToBeReady();
+    InetSocketAddress ksmHttpAddress = cluster.getKeySpaceManager()
+        .getHttpServer().getHttpAddress();
+    ozClient = OzoneClientFactory.getRestClient(ksmHttpAddress.getHostName(),
+        ksmHttpAddress.getPort(), conf);
+    store = ozClient.getObjectStore();
+  }
+
+
+  @Test
+  public void testCreateVolume()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+  }
+
+  @Test
+  public void testCreateVolumeWithOwner()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    VolumeArgs.Builder argsBuilder = VolumeArgs.newBuilder();
+    argsBuilder.setOwner("test");
+    store.createVolume(volumeName, argsBuilder.build());
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+    Assert.assertEquals("test", volume.getOwner());
+  }
+
+  @Test
+  public void testCreateVolumeWithQuota()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    VolumeArgs.Builder argsBuilder = VolumeArgs.newBuilder();
+    argsBuilder.setOwner("test").setQuota("1000000000 BYTES");
+    store.createVolume(volumeName, argsBuilder.build());
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+    Assert.assertEquals("test", volume.getOwner());
+    Assert.assertEquals(1000000000L, volume.getQuota());
+  }
+
+  @Test
+  public void testVolumeAlreadyExist()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    try {
+      store.createVolume(volumeName);
+    } catch (IOException ex) {
+      Assert.assertEquals(
+          "Volume creation failed, error:VOLUME_ALREADY_EXISTS",
+          ex.getCause().getMessage());
+    }
+  }
+
+  @Test
+  public void testSetVolumeOwner()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    store.getVolume(volumeName).setOwner("test");
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals("test", volume.getOwner());
+  }
+
+  @Test
+  public void testSetVolumeQuota()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    store.getVolume(volumeName).setQuota(
+        OzoneQuota.parseQuota("100000000 BYTES"));
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(100000000L, volume.getQuota());
+  }
+
+  @Test
+  public void testDeleteVolume()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Info Volume failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertNotNull(volume);
+    store.deleteVolume(volumeName);
+    store.getVolume(volumeName);
+  }
+
+  @Test
+  public void testCreateBucket()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+  }
+
+  @Test
+  public void testCreateBucketWithVersioning()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setVersioning(true);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(true, bucket.getVersioning());
+  }
+
+  @Test
+  public void testCreateBucketWithStorageType()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setStorageType(StorageType.SSD);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(StorageType.SSD, bucket.getStorageType());
+  }
+
+  @Test
+  public void testCreateBucketWithAcls()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertTrue(bucket.getAcls().contains(userAcl));
+  }
+
+  @Test
+  public void testCreateBucketWithAllArgument()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setVersioning(true)
+        .setStorageType(StorageType.SSD)
+        .setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(true, bucket.getVersioning());
+    Assert.assertEquals(StorageType.SSD, bucket.getStorageType());
+    Assert.assertTrue(bucket.getAcls().contains(userAcl));
+  }
+
+  @Test
+  public void testAddBucketAcl()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(new OzoneAcl(
+        OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE));
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.addAcls(acls);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertTrue(bucket.getAcls().contains(acls.get(0)));
+  }
+
+  @Test
+  public void testRemoveBucketAcl()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.removeAcls(acls);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertFalse(bucket.getAcls().contains(acls.get(0)));
+  }
+
+  @Test
+  public void testSetBucketVersioning()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.setVersioning(true);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertEquals(true, newBucket.getVersioning());
+  }
+
+  @Test
+  public void testSetBucketStorageType()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.setStorageType(StorageType.SSD);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertEquals(StorageType.SSD, newBucket.getStorageType());
+  }
+
+
+  @Test
+  public void testDeleteBucket()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Info Bucket failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertNotNull(bucket);
+    volume.deleteBucket(bucketName);
+    volume.getBucket(bucketName);
+  }
+
+
+  @Test
+  public void testPutKey()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+
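+    // Write ten keys and read each one back to verify that the content
+    // round-trips through the REST client.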
+    for (int i = 0; i < 10; i++) {
+      String keyName = UUID.randomUUID().toString();
+
+      OzoneOutputStream out = bucket.createKey(keyName,
+          value.getBytes().length, ReplicationType.STAND_ALONE,
+          ReplicationFactor.ONE);
+      out.write(value.getBytes());
+      out.close();
+      OzoneKey key = bucket.getKey(keyName);
+      Assert.assertEquals(keyName, key.getName());
+      OzoneInputStream is = bucket.readKey(keyName);
+      byte[] fileContent = new byte[value.getBytes().length];
+      is.read(fileContent);
+      is.close();
+      Assert.assertEquals(value, new String(fileContent));
+    }
+  }
+
+  @Test
+  public void testDeleteKey()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Lookup key failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    String keyName = UUID.randomUUID().toString();
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    OzoneOutputStream out = bucket.createKey(keyName,
+        value.getBytes().length, ReplicationType.STAND_ALONE,
+        ReplicationFactor.ONE);
+    out.write(value.getBytes());
+    out.close();
+    OzoneKey key = bucket.getKey(keyName);
+    Assert.assertEquals(keyName, key.getName());
+    bucket.deleteKey(keyName);
+    bucket.getKey(keyName);
+  }
+
+  /**
+   * Close OzoneClient and shut down the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() throws IOException {
+    if(ozClient != null) {
+      ozClient.close();
+    }
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/package-info.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/package-info.java
new file mode 100644
index 0000000..c8940e4
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rest;
+
+/**
+ * This package contains test classes for the Ozone REST client library.
+ */
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
new file mode 100644
index 0000000..32a70a2
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
@@ -0,0 +1,815 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rpc;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.BucketArgs;
+import org.apache.hadoop.ozone.client.ObjectStore;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientFactory;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.VolumeArgs;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.ksm.KeySpaceManager;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.
+    StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.util.Time;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Tests all the public-facing APIs of the Ozone RPC client.
+ */
+public class TestOzoneRpcClient {
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneClient ozClient = null;
+  private static ObjectStore store = null;
+  private static KeySpaceManager keySpaceManager;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws Exception
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    conf.setInt(ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE, 1);
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(10).build();
+    cluster.waitForClusterToBeReady();
+    ozClient = OzoneClientFactory.getRpcClient(conf);
+    store = ozClient.getObjectStore();
+    storageContainerLocationClient =
+        cluster.getStorageContainerLocationClient();
+    keySpaceManager = cluster.getKeySpaceManager();
+  }
+
+  @Test
+  public void testCreateVolume()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+  }
+
+  @Test
+  public void testCreateVolumeWithOwner()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    VolumeArgs.Builder argsBuilder = VolumeArgs.newBuilder();
+    argsBuilder.setOwner("test");
+    store.createVolume(volumeName, argsBuilder.build());
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+    Assert.assertEquals("test", volume.getOwner());
+  }
+
+  @Test
+  public void testCreateVolumeWithQuota()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    VolumeArgs.Builder argsBuilder = VolumeArgs.newBuilder();
+    argsBuilder.setOwner("test").setQuota("1000000000 BYTES");
+    store.createVolume(volumeName, argsBuilder.build());
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(volumeName, volume.getName());
+    Assert.assertEquals("test", volume.getOwner());
+    Assert.assertEquals(1000000000L, volume.getQuota());
+  }
+
+  @Test
+  public void testInvalidVolumeCreation() throws IOException {
+    thrown.expectMessage("Bucket or Volume name has an unsupported" +
+        " character : #");
+    String volumeName = "invalid#name";
+    store.createVolume(volumeName);
+  }
+
+  @Test
+  public void testVolumeAlreadyExist()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    try {
+      store.createVolume(volumeName);
+    } catch (IOException ex) {
+      Assert.assertEquals(
+          "Volume creation failed, error:VOLUME_ALREADY_EXISTS",
+          ex.getMessage());
+    }
+  }
+
+  @Test
+  public void testSetVolumeOwner()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    store.getVolume(volumeName).setOwner("test");
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals("test", volume.getOwner());
+  }
+
+  @Test
+  public void testSetVolumeQuota()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    store.getVolume(volumeName).setQuota(
+        OzoneQuota.parseQuota("100000000 BYTES"));
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertEquals(100000000L, volume.getQuota());
+  }
+
+  @Test
+  public void testDeleteVolume()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Info Volume failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    Assert.assertNotNull(volume);
+    store.deleteVolume(volumeName);
+    store.getVolume(volumeName);
+  }
+
+  @Test
+  public void testCreateBucket()
+      throws IOException, OzoneException {
+    long currentTime = Time.now();
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertTrue(bucket.getCreationTime() >= currentTime);
+    Assert.assertTrue(volume.getCreationTime() >= currentTime);
+  }
+
+  @Test
+  public void testCreateBucketWithVersioning()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setVersioning(true);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(true, bucket.getVersioning());
+  }
+
+  @Test
+  public void testCreateBucketWithStorageType()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setStorageType(StorageType.SSD);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(StorageType.SSD, bucket.getStorageType());
+  }
+
+  @Test
+  public void testCreateBucketWithAcls()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertTrue(bucket.getAcls().contains(userAcl));
+  }
+
+  @Test
+  public void testCreateBucketWithAllArgument()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setVersioning(true)
+        .setStorageType(StorageType.SSD)
+        .setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertEquals(true, bucket.getVersioning());
+    Assert.assertEquals(StorageType.SSD, bucket.getStorageType());
+    Assert.assertTrue(bucket.getAcls().contains(userAcl));
+  }
+
+  @Test
+  public void testInvalidBucketCreation() throws IOException {
+    thrown.expectMessage("Bucket or Volume name has an unsupported" +
+        " character : #");
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = "invalid#bucket";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+  }
+
+  @Test
+  public void testAddBucketAcl()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(new OzoneAcl(
+        OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE));
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.addAcls(acls);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertTrue(bucket.getAcls().contains(acls.get(0)));
+  }
+
+  @Test
+  public void testRemoveBucketAcl()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, "test",
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    List<OzoneAcl> acls = new ArrayList<>();
+    acls.add(userAcl);
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    BucketArgs.Builder builder = BucketArgs.newBuilder();
+    builder.setAcls(acls);
+    volume.createBucket(bucketName, builder.build());
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.removeAcls(acls);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertFalse(bucket.getAcls().contains(acls.get(0)));
+  }
+
+  @Test
+  public void testSetBucketVersioning()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.setVersioning(true);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertEquals(true, newBucket.getVersioning());
+  }
+
+  @Test
+  public void testSetBucketStorageType()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    bucket.setStorageType(StorageType.SSD);
+    OzoneBucket newBucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, newBucket.getName());
+    Assert.assertEquals(StorageType.SSD, newBucket.getStorageType());
+  }
+
+
+  @Test
+  public void testDeleteBucket()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Info Bucket failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertNotNull(bucket);
+    volume.deleteBucket(bucketName);
+    volume.getBucket(bucketName);
+  }
+
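+  /**
+   * Returns true only if every block location of the given key belongs to a
+   * pipeline with the expected replication type and factor.
+   */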
+  private boolean verifyRatisReplication(String volumeName, String bucketName,
+      String keyName, ReplicationType type, ReplicationFactor factor)
+      throws IOException {
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .build();
+    HddsProtos.ReplicationType replicationType =
+        HddsProtos.ReplicationType.valueOf(type.toString());
+    HddsProtos.ReplicationFactor replicationFactor =
+        HddsProtos.ReplicationFactor.valueOf(factor.getValue());
+    KsmKeyInfo keyInfo = keySpaceManager.lookupKey(keyArgs);
+    for (KsmKeyLocationInfo info:
+        keyInfo.getLatestVersionLocations().getLocationList()) {
+      Pipeline pipeline =
+          storageContainerLocationClient.getContainer(info.getContainerName());
+      if ((pipeline.getFactor() != replicationFactor) ||
+          (pipeline.getType() != replicationType)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Test
+  public void testPutKey()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    long currentTime = Time.now();
+
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+
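+    // Write ten keys with STAND_ALONE/ONE replication, then read each key
+    // back and check both its content and its pipeline replication settings.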
+    for (int i = 0; i < 10; i++) {
+      String keyName = UUID.randomUUID().toString();
+
+      OzoneOutputStream out = bucket.createKey(keyName,
+          value.getBytes().length, ReplicationType.STAND_ALONE,
+          ReplicationFactor.ONE);
+      out.write(value.getBytes());
+      out.close();
+      OzoneKey key = bucket.getKey(keyName);
+      Assert.assertEquals(keyName, key.getName());
+      OzoneInputStream is = bucket.readKey(keyName);
+      byte[] fileContent = new byte[value.getBytes().length];
+      is.read(fileContent);
+      is.close();
+      Assert.assertTrue(verifyRatisReplication(volumeName, bucketName,
+          keyName, ReplicationType.STAND_ALONE,
+          ReplicationFactor.ONE));
+      Assert.assertEquals(value, new String(fileContent));
+      Assert.assertTrue(key.getCreationTime() >= currentTime);
+      Assert.assertTrue(key.getModificationTime() >= currentTime);
+    }
+  }
+
+  @Test
+  public void testPutKeyRatisOneNode()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    long currentTime = Time.now();
+
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+
+    for (int i = 0; i < 10; i++) {
+      String keyName = UUID.randomUUID().toString();
+
+      OzoneOutputStream out = bucket.createKey(keyName,
+          value.getBytes().length, ReplicationType.RATIS,
+          ReplicationFactor.ONE);
+      out.write(value.getBytes());
+      out.close();
+      OzoneKey key = bucket.getKey(keyName);
+      Assert.assertEquals(keyName, key.getName());
+      OzoneInputStream is = bucket.readKey(keyName);
+      byte[] fileContent = new byte[value.getBytes().length];
+      is.read(fileContent);
+      is.close();
+      Assert.assertTrue(verifyRatisReplication(volumeName, bucketName,
+          keyName, ReplicationType.RATIS, ReplicationFactor.ONE));
+      Assert.assertEquals(value, new String(fileContent));
+      Assert.assertTrue(key.getCreationTime() >= currentTime);
+      Assert.assertTrue(key.getModificationTime() >= currentTime);
+    }
+  }
+
+  @Test
+  public void testPutKeyRatisThreeNodes()
+      throws IOException, OzoneException {
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    long currentTime = Time.now();
+
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+
+    for (int i = 0; i < 10; i++) {
+      String keyName = UUID.randomUUID().toString();
+
+      OzoneOutputStream out = bucket.createKey(keyName,
+          value.getBytes().length, ReplicationType.RATIS,
+          ReplicationFactor.THREE);
+      out.write(value.getBytes());
+      out.close();
+      OzoneKey key = bucket.getKey(keyName);
+      Assert.assertEquals(keyName, key.getName());
+      OzoneInputStream is = bucket.readKey(keyName);
+      byte[] fileContent = new byte[value.getBytes().length];
+      is.read(fileContent);
+      is.close();
+      Assert.assertTrue(verifyRatisReplication(volumeName, bucketName,
+          keyName, ReplicationType.RATIS,
+          ReplicationFactor.THREE));
+      Assert.assertEquals(value, new String(fileContent));
+      Assert.assertTrue(key.getCreationTime() >= currentTime);
+      Assert.assertTrue(key.getModificationTime() >= currentTime);
+    }
+  }
+
+  @Test
+  public void testDeleteKey()
+      throws IOException, OzoneException {
+    thrown.expectMessage("Lookup key failed, error");
+    String volumeName = UUID.randomUUID().toString();
+    String bucketName = UUID.randomUUID().toString();
+    String keyName = UUID.randomUUID().toString();
+    String value = "sample value";
+    store.createVolume(volumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    volume.createBucket(bucketName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    OzoneOutputStream out = bucket.createKey(keyName,
+        value.getBytes().length, ReplicationType.STAND_ALONE,
+        ReplicationFactor.ONE);
+    out.write(value.getBytes());
+    out.close();
+    OzoneKey key = bucket.getKey(keyName);
+    Assert.assertEquals(keyName, key.getName());
+    bucket.deleteKey(keyName);
+    bucket.getKey(keyName);
+  }
+
+  @Test
+  public void testListVolume() throws IOException, OzoneException {
+    String volBase = "vol-" + RandomStringUtils.randomNumeric(3);
+    // Create 10 volumes, vol-<random>-a-0-<random> to vol-<random>-a-9-<random>.
+    String volBaseNameA = volBase + "-a-";
+    for(int i = 0; i < 10; i++) {
+      store.createVolume(
+          volBaseNameA + i + "-" + RandomStringUtils.randomNumeric(5));
+    }
+    // Create 10 volumes, vol-<random>-b-0-<random> to vol-<random>-b-9-<random>.
+    String volBaseNameB = volBase + "-b-";
+    for(int i = 0; i < 10; i++) {
+      store.createVolume(
+          volBaseNameB + i + "-" + RandomStringUtils.randomNumeric(5));
+    }
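+    // Listing with the shared prefix should return all 20 volumes.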
+    Iterator<OzoneVolume> volIterator = store.listVolumes(volBase);
+    int totalVolumeCount = 0;
+    while(volIterator.hasNext()) {
+      volIterator.next();
+      totalVolumeCount++;
+    }
+    Assert.assertEquals(20, totalVolumeCount);
+    Iterator<OzoneVolume> volAIterator = store.listVolumes(volBaseNameA);
+    for(int i = 0; i < 10; i++) {
+      Assert.assertTrue(volAIterator.next().getName()
+          .startsWith(volBaseNameA + i + "-"));
+    }
+    Assert.assertFalse(volAIterator.hasNext());
+    Iterator<OzoneVolume> volBIterator = store.listVolumes(volBaseNameB);
+    for(int i = 0; i < 10; i++) {
+      Assert.assertTrue(volBIterator.next().getName()
+          .startsWith(volBaseNameB + i + "-"));
+    }
+    Assert.assertFalse(volBIterator.hasNext());
+    Iterator<OzoneVolume> iter = store.listVolumes(volBaseNameA + "1-");
+    Assert.assertTrue(iter.next().getName().startsWith(volBaseNameA + "1-"));
+    Assert.assertFalse(iter.hasNext());
+  }
+
+  @Test
+  public void testListBucket()
+      throws IOException, OzoneException {
+    String volumeA = "vol-a-" + RandomStringUtils.randomNumeric(5);
+    String volumeB = "vol-b-" + RandomStringUtils.randomNumeric(5);
+    store.createVolume(volumeA);
+    store.createVolume(volumeB);
+    OzoneVolume volA = store.getVolume(volumeA);
+    OzoneVolume volB = store.getVolume(volumeB);
+
+    // Create 10 buckets with prefix bucket-a- in each of vol-a-<random>
+    // and vol-b-<random>.
+    String bucketBaseNameA = "bucket-a-";
+    for(int i = 0; i < 10; i++) {
+      volA.createBucket(
+          bucketBaseNameA + i + "-" + RandomStringUtils.randomNumeric(5));
+      volB.createBucket(
+          bucketBaseNameA + i + "-" + RandomStringUtils.randomNumeric(5));
+    }
+    // Create 10 more buckets with prefix bucket-b- in each of the two
+    // volumes.
+    String bucketBaseNameB = "bucket-b-";
+    for(int i = 0; i < 10; i++) {
+      volA.createBucket(
+          bucketBaseNameB + i + "-" + RandomStringUtils.randomNumeric(5));
+      volB.createBucket(
+          bucketBaseNameB + i + "-" + RandomStringUtils.randomNumeric(5));
+    }
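+    // Listing with the shared "bucket-" prefix should return all 20 buckets
+    // of a volume.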
+    Iterator<OzoneBucket> volABucketIter =
+        volA.listBuckets("bucket-");
+    int volABucketCount = 0;
+    while(volABucketIter.hasNext()) {
+      volABucketIter.next();
+      volABucketCount++;
+    }
+    Assert.assertEquals(20, volABucketCount);
+    Iterator<OzoneBucket> volBBucketIter =
+        volB.listBuckets("bucket-");
+    int volBBucketCount = 0;
+    while(volBBucketIter.hasNext()) {
+      volBBucketIter.next();
+      volBBucketCount++;
+    }
+    Assert.assertEquals(20, volBBucketCount);
+
+    Iterator<OzoneBucket> volABucketAIter =
+        volA.listBuckets("bucket-a-");
+    int volABucketACount = 0;
+    while(volABucketAIter.hasNext()) {
+      volABucketAIter.next();
+      volABucketACount++;
+    }
+    Assert.assertEquals(10, volABucketACount);
+    Iterator<OzoneBucket> volBBucketBIter =
+        volB.listBuckets("bucket-b-");
+    int volBBucketBCount = 0;
+    while(volBBucketBIter.hasNext()) {
+      volBBucketBIter.next();
+      volBBucketBCount++;
+    }
+    Assert.assertEquals(10, volBBucketBCount);
+    Iterator<OzoneBucket> volABucketBIter = volA.listBuckets("bucket-b-");
+    for(int i = 0; i < 10; i++) {
+      Assert.assertTrue(volABucketBIter.next().getName()
+          .startsWith(bucketBaseNameB + i + "-"));
+    }
+    Assert.assertFalse(volABucketBIter.hasNext());
+    Iterator<OzoneBucket> volBBucketAIter = volB.listBuckets("bucket-a-");
+    for(int i = 0; i < 10; i++) {
+      Assert.assertTrue(volBBucketAIter.next().getName()
+          .startsWith(bucketBaseNameA + i + "-"));
+    }
+    Assert.assertFalse(volBBucketAIter.hasNext());
+
+  }
+
+  @Test
+  public void testListBucketsOnEmptyVolume()
+      throws IOException, OzoneException {
+    String volume = "vol-" + RandomStringUtils.randomNumeric(5);
+    store.createVolume(volume);
+    OzoneVolume vol = store.getVolume(volume);
+    Iterator<OzoneBucket> buckets = vol.listBuckets("");
+    while(buckets.hasNext()) {
+      Assert.fail();
+    }
+  }
+
+  @Test
+  public void testListKey()
+      throws IOException, OzoneException {
+    String volumeA = "vol-a-" + RandomStringUtils.randomNumeric(5);
+    String volumeB = "vol-b-" + RandomStringUtils.randomNumeric(5);
+    String bucketA = "buc-a-" + RandomStringUtils.randomNumeric(5);
+    String bucketB = "buc-b-" + RandomStringUtils.randomNumeric(5);
+    store.createVolume(volumeA);
+    store.createVolume(volumeB);
+    OzoneVolume volA = store.getVolume(volumeA);
+    OzoneVolume volB = store.getVolume(volumeB);
+    volA.createBucket(bucketA);
+    volA.createBucket(bucketB);
+    volB.createBucket(bucketA);
+    volB.createBucket(bucketB);
+    OzoneBucket volAbucketA = volA.getBucket(bucketA);
+    OzoneBucket volAbucketB = volA.getBucket(bucketB);
+    OzoneBucket volBbucketA = volB.getBucket(bucketA);
+    OzoneBucket volBbucketB = volB.getBucket(bucketB);
+
+    /*
+    Create 10 keys with prefix key-a- in each of
+    vol-a-<random>/buc-a-<random>, vol-a-<random>/buc-b-<random>,
+    vol-b-<random>/buc-a-<random> and vol-b-<random>/buc-b-<random>.
+     */
+    String keyBaseA = "key-a-";
+    for (int i = 0; i < 10; i++) {
+      byte[] value = RandomStringUtils.randomAscii(10240).getBytes();
+      OzoneOutputStream one = volAbucketA.createKey(
+          keyBaseA + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      one.write(value);
+      one.close();
+      OzoneOutputStream two = volAbucketB.createKey(
+          keyBaseA + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      two.write(value);
+      two.close();
+      OzoneOutputStream three = volBbucketA.createKey(
+          keyBaseA + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      three.write(value);
+      three.close();
+      OzoneOutputStream four = volBbucketB.createKey(
+          keyBaseA + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      four.write(value);
+      four.close();
+    }
+    /*
+    Create 10 more keys with prefix key-b- in each of the same four buckets.
+     */
+    String keyBaseB = "key-b-";
+    for (int i = 0; i < 10; i++) {
+      byte[] value = RandomStringUtils.randomAscii(10240).getBytes();
+      OzoneOutputStream one = volAbucketA.createKey(
+          keyBaseB + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      one.write(value);
+      one.close();
+      OzoneOutputStream two = volAbucketB.createKey(
+          keyBaseB + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      two.write(value);
+      two.close();
+      OzoneOutputStream three = volBbucketA.createKey(
+          keyBaseB + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      three.write(value);
+      three.close();
+      OzoneOutputStream four = volBbucketB.createKey(
+          keyBaseB + i + "-" + RandomStringUtils.randomNumeric(5),
+          value.length, ReplicationType.STAND_ALONE, ReplicationFactor.ONE);
+      four.write(value);
+      four.close();
+    }
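+    // Each bucket now holds 20 keys; listing with the shared "key-" prefix
+    // should return all of them.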
+    Iterator<OzoneKey> volABucketAIter =
+        volAbucketA.listKeys("key-");
+    int volABucketAKeyCount = 0;
+    while(volABucketAIter.hasNext()) {
+      volABucketAIter.next();
+      volABucketAKeyCount++;
+    }
+    Assert.assertEquals(20, volABucketAKeyCount);
+    Iterator<OzoneKey> volABucketBIter =
+        volAbucketB.listKeys("key-");
+    int volABucketBKeyCount = 0;
+    while(volABucketBIter.hasNext()) {
+      volABucketBIter.next();
+      volABucketBKeyCount++;
+    }
+    Assert.assertEquals(20, volABucketBKeyCount);
+    Iterator<OzoneKey> volBBucketAIter =
+        volBbucketA.listKeys("key-");
+    int volBBucketAKeyCount = 0;
+    while(volBBucketAIter.hasNext()) {
+      volBBucketAIter.next();
+      volBBucketAKeyCount++;
+    }
+    Assert.assertEquals(20, volBBucketAKeyCount);
+    Iterator<OzoneKey> volBBucketBIter =
+        volBbucketB.listKeys("key-");
+    int volBBucketBKeyCount = 0;
+    while(volBBucketBIter.hasNext()) {
+      volBBucketBIter.next();
+      volBBucketBKeyCount++;
+    }
+    Assert.assertEquals(20, volBBucketBKeyCount);
+    Iterator<OzoneKey> volABucketAKeyAIter =
+        volAbucketA.listKeys("key-a-");
+    int volABucketAKeyACount = 0;
+    while(volABucketAKeyAIter.hasNext()) {
+      volABucketAKeyAIter.next();
+      volABucketAKeyACount++;
+    }
+    Assert.assertEquals(10, volABucketAKeyACount);
+    Iterator<OzoneKey> volABucketAKeyBIter =
+        volAbucketA.listKeys("key-b-");
+    for(int i = 0; i < 10; i++) {
+      Assert.assertTrue(volABucketAKeyBIter.next().getName()
+          .startsWith("key-b-" + i + "-"));
+    }
+    Assert.assertFalse(volABucketAKeyBIter.hasNext());
+  }
+
+  @Test
+  public void testListKeyOnEmptyBucket()
+      throws IOException, OzoneException {
+    String volume = "vol-" + RandomStringUtils.randomNumeric(5);
+    String bucket = "buc-" + RandomStringUtils.randomNumeric(5);
+    store.createVolume(volume);
+    OzoneVolume vol = store.getVolume(volume);
+    vol.createBucket(bucket);
+    OzoneBucket buc = vol.getBucket(bucket);
+    Iterator<OzoneKey> keys = buc.listKeys("");
+    while(keys.hasNext()) {
+      Assert.fail();
+    }
+  }
+
+  /**
+   * Close OzoneClient and shut down the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() throws IOException {
+    if(ozClient != null) {
+      ozClient.close();
+    }
+
+    if (storageContainerLocationClient != null) {
+      storageContainerLocationClient.close();
+    }
+
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/package-info.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/package-info.java
new file mode 100644
index 0000000..0f48495
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/package-info.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.client.rpc;
+
+/**
+ * This package contains test class for Ozone rpc client library.
+ */
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
new file mode 100644
index 0000000..acab0b2
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
@@ -0,0 +1,601 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container;
+
+import com.google.common.base.Preconditions;
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Random;
+import java.util.UUID;
+
+/**
+ * Helpers for container tests.
+ */
+public final class ContainerTestHelper {
+  public static final Logger LOG = LoggerFactory.getLogger(
+      ContainerTestHelper.class);
+  private static Random r = new Random();
+
+  /**
+   * Never constructed.
+   */
+  private ContainerTestHelper() {
+  }
+
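+  /**
+   * Points OZONE_LOCALSTORAGE_ROOT at a temp directory named after the
+   * given test class so local metadata does not leak between test classes.
+   */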
+  public static void setOzoneLocalStorageRoot(
+      Class<?> clazz, OzoneConfiguration conf) {
+    String path = GenericTestUtils.getTempPath(clazz.getSimpleName());
+    path += conf.getTrimmed(
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+  }
+
+  // TODO: mock multi-node pipeline
+  /**
+   * Create a pipeline with single node replica.
+   *
+   * @return Pipeline with single node in it.
+   * @throws IOException
+   */
+  public static Pipeline createSingleNodePipeline(String containerName) throws
+      IOException {
+    return createPipeline(containerName, 1);
+  }
+
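+  /**
+   * Returns a loopback address with a port that was free when probed;
+   * the socket is closed before returning, so the port may be reused.
+   */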
+  public static String createLocalAddress() throws IOException {
+    try(ServerSocket s = new ServerSocket(0)) {
+      return "127.0.0.1:" + s.getLocalPort();
+    }
+  }
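+
+  /**
+   * Creates DatanodeDetails with a random UUID and a free local port used
+   * for the container, Ratis and REST endpoints.
+   */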
+  public static DatanodeDetails createDatanodeDetails() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    DatanodeDetails datanodeDetails = DatanodeDetails.newBuilder()
+        .setUuid(UUID.randomUUID().toString())
+        .setIpAddress(socket.getInetAddress().getHostAddress())
+        .setHostName(socket.getInetAddress().getHostName())
+        .setContainerPort(port)
+        .setRatisPort(port)
+        .setOzoneRestPort(port)
+        .build();
+
+    socket.close();
+    return datanodeDetails;
+  }
+
+  /**
+   * Creates a pipeline with the given number of datanodes.
+   *
+   * @return Pipeline with numNodes datanodes in it.
+   * @throws IOException
+   */
+  public static Pipeline createPipeline(String containerName, int numNodes)
+      throws IOException {
+    Preconditions.checkArgument(numNodes >= 1);
+    final List<DatanodeDetails> ids = new ArrayList<>(numNodes);
+    for(int i = 0; i < numNodes; i++) {
+      ids.add(createDatanodeDetails());
+    }
+    return createPipeline(containerName, ids);
+  }
+
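+  /**
+   * Creates a pipeline whose leader is the first datanode in the given
+   * iterable; any remaining datanodes are added as channel members.
+   */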
+  public static Pipeline createPipeline(
+      String containerName, Iterable<DatanodeDetails> ids)
+      throws IOException {
+    Objects.requireNonNull(ids, "ids == null");
+    final Iterator<DatanodeDetails> i = ids.iterator();
+    Preconditions.checkArgument(i.hasNext());
+    final DatanodeDetails leader = i.next();
+    String pipelineName = "TEST-" + UUID.randomUUID().toString().substring(3);
+    final PipelineChannel pipelineChannel =
+        new PipelineChannel(leader.getUuidString(), LifeCycleState.OPEN,
+            ReplicationType.STAND_ALONE, ReplicationFactor.ONE, pipelineName);
+    pipelineChannel.addMember(leader);
+    while (i.hasNext()) {
+      pipelineChannel.addMember(i.next());
+    }
+    return new Pipeline(containerName, pipelineChannel);
+  }
+
+  /**
+   * Creates a ChunkInfo for testing.
+   *
+   * @param keyName - Name of the key
+   * @param seqNo - Chunk number.
+   * @return ChunkInfo
+   * @throws IOException
+   */
+  public static ChunkInfo getChunk(String keyName, int seqNo, long offset,
+      long len) throws IOException {
+
+    ChunkInfo info = new ChunkInfo(String.format("%s.data.%d", keyName,
+        seqNo), offset, len);
+    return info;
+  }
+
+  /**
+   * Generates some data of the requested len.
+   *
+   * @param len - Number of bytes.
+   * @return byte array with valid data.
+   */
+  public static byte[] getData(int len) {
+    byte[] data = new byte[len];
+    r.nextBytes(data);
+    return data;
+  }
+
+  /**
+   * Computes the hash and sets the value correctly.
+   *
+   * @param info - chunk info.
+   * @param data - data array
+   * @throws NoSuchAlgorithmException
+   */
+  public static void setDataChecksum(ChunkInfo info, byte[] data)
+      throws NoSuchAlgorithmException {
+    MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+    sha.update(data);
+    info.setChecksum(Hex.encodeHexString(sha.digest()));
+  }
+
+  /**
+   * Returns a writeChunk Request.
+   *
+   * @param pipeline - A set of machines where this container lives.
+   * @param containerName - Name of the container.
+   * @param keyName - Name of the Key this chunk is part of.
+   * @param datalen - Length of data.
+   * @return ContainerCommandRequestProto
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  public static ContainerCommandRequestProto getWriteChunkRequest(
+      Pipeline pipeline, String containerName, String keyName, int datalen)
+      throws IOException, NoSuchAlgorithmException {
+    LOG.trace("writeChunk {} (key={}) to pipeline=",
+        datalen, keyName, pipeline);
+    ContainerProtos.WriteChunkRequestProto.Builder writeRequest =
+        ContainerProtos.WriteChunkRequestProto
+            .newBuilder();
+
+    Pipeline newPipeline =
+        new Pipeline(containerName, pipeline.getPipelineChannel());
+    writeRequest.setPipeline(newPipeline.getProtobufMessage());
+    writeRequest.setKeyName(keyName);
+
+    byte[] data = getData(datalen);
+    ChunkInfo info = getChunk(keyName, 0, 0, datalen);
+    setDataChecksum(info, data);
+
+    writeRequest.setChunkData(info.getProtoBufMessage());
+    writeRequest.setData(ByteString.copyFrom(data));
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.WriteChunk);
+    request.setWriteChunk(writeRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(newPipeline.getLeader().getUuidString());
+
+    return request.build();
+  }
+
+  /**
+   * Returns PutSmallFile Request that we can send to the container.
+   *
+   * @param pipeline - Pipeline
+   * @param containerName - ContainerName.
+   * @param keyName - KeyName
+   * @param dataLen - Number of bytes in the data
+   * @return ContainerCommandRequestProto
+   */
+  public static ContainerCommandRequestProto getWriteSmallFileRequest(
+      Pipeline pipeline, String containerName, String keyName, int dataLen)
+      throws Exception {
+    ContainerProtos.PutSmallFileRequestProto.Builder smallFileRequest =
+        ContainerProtos.PutSmallFileRequestProto.newBuilder();
+    Pipeline newPipeline =
+        new Pipeline(containerName, pipeline.getPipelineChannel());
+    byte[] data = getData(dataLen);
+    ChunkInfo info = getChunk(keyName, 0, 0, dataLen);
+    setDataChecksum(info, data);
+
+
+    ContainerProtos.PutKeyRequestProto.Builder putRequest =
+        ContainerProtos.PutKeyRequestProto.newBuilder();
+
+    putRequest.setPipeline(newPipeline.getProtobufMessage());
+    KeyData keyData = new KeyData(containerName, keyName);
+
+    List<ContainerProtos.ChunkInfo> newList = new LinkedList<>();
+    newList.add(info.getProtoBufMessage());
+    keyData.setChunks(newList);
+    putRequest.setKeyData(keyData.getProtoBufMessage());
+
+    smallFileRequest.setChunkInfo(info.getProtoBufMessage());
+    smallFileRequest.setData(ByteString.copyFrom(data));
+    smallFileRequest.setKey(putRequest);
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.PutSmallFile);
+    request.setPutSmallFile(smallFileRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(newPipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+
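+  /**
+   * Returns a GetSmallFile request that reads back the key written by the
+   * given PutKey request.
+   */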
+  public static ContainerCommandRequestProto getReadSmallFileRequest(
+      ContainerProtos.PutKeyRequestProto putKey)
+      throws Exception {
+    ContainerProtos.GetSmallFileRequestProto.Builder smallFileRequest =
+        ContainerProtos.GetSmallFileRequestProto.newBuilder();
+    Pipeline pipeline = Pipeline.getFromProtoBuf(putKey.getPipeline());
+    ContainerCommandRequestProto getKey = getKeyRequest(putKey);
+    smallFileRequest.setKey(getKey.getGetKey());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.GetSmallFile);
+    request.setGetSmallFile(smallFileRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Returns a read Request.
+   *
+   * @param request writeChunkRequest.
+   * @return Request.
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  public static ContainerCommandRequestProto getReadChunkRequest(
+      ContainerProtos.WriteChunkRequestProto request)
+      throws IOException, NoSuchAlgorithmException {
+    LOG.trace("readChunk key={} from pipeline={}",
+        request.getKeyName(), request.getPipeline());
+
+    ContainerProtos.ReadChunkRequestProto.Builder readRequest =
+        ContainerProtos.ReadChunkRequestProto.newBuilder();
+    Pipeline pipeline = Pipeline.getFromProtoBuf(request.getPipeline());
+    readRequest.setPipeline(request.getPipeline());
+
+    readRequest.setKeyName(request.getKeyName());
+    readRequest.setChunkData(request.getChunkData());
+
+    ContainerCommandRequestProto.Builder newRequest =
+        ContainerCommandRequestProto.newBuilder();
+    newRequest.setCmdType(ContainerProtos.Type.ReadChunk);
+    newRequest.setReadChunk(readRequest);
+    newRequest.setTraceID(UUID.randomUUID().toString());
+    newRequest.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return newRequest.build();
+  }
+
+  /**
+   * Returns a delete Request.
+   *
+   * @param writeRequest - write request
+   * @return request
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  public static ContainerCommandRequestProto getDeleteChunkRequest(
+      ContainerProtos.WriteChunkRequestProto writeRequest)
+      throws
+      IOException, NoSuchAlgorithmException {
+    LOG.trace("deleteChunk key={} from pipeline={}",
+        writeRequest.getKeyName(), writeRequest.getPipeline());
+    Pipeline pipeline = Pipeline.getFromProtoBuf(writeRequest.getPipeline());
+    ContainerProtos.DeleteChunkRequestProto.Builder deleteRequest =
+        ContainerProtos.DeleteChunkRequestProto
+            .newBuilder();
+
+    deleteRequest.setPipeline(writeRequest.getPipeline());
+    deleteRequest.setChunkData(writeRequest.getChunkData());
+    deleteRequest.setKeyName(writeRequest.getKeyName());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.DeleteChunk);
+    request.setDeleteChunk(deleteRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Returns a create container command for test purposes. There are a bunch
+   * of tests where we need to just send a request and get a reply.
+   *
+   * @return ContainerCommandRequestProto.
+   */
+  public static ContainerCommandRequestProto getCreateContainerRequest(
+      String containerName, Pipeline pipeline) throws IOException {
+    LOG.trace("addContainer: {}", containerName);
+
+    ContainerProtos.CreateContainerRequestProto.Builder createRequest =
+        ContainerProtos.CreateContainerRequestProto
+            .newBuilder();
+    ContainerProtos.ContainerData.Builder containerData = ContainerProtos
+        .ContainerData.newBuilder();
+    containerData.setName(containerName);
+    createRequest.setPipeline(
+        ContainerTestHelper.createSingleNodePipeline(containerName)
+            .getProtobufMessage());
+    createRequest.setContainerData(containerData.build());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.CreateContainer);
+    request.setCreateContainer(createRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+
+    return request.build();
+  }
+
+  /**
+   * Returns an update container command for test purposes.
+   * Creates container data based on the given metadata and requests an
+   * update of an existing container with it.
+   *
+   * @param containerName - name of the container to update.
+   * @param metaData - metadata key/value pairs to set on the container.
+   * @return ContainerCommandRequestProto.
+   * @throws IOException
+   */
+  public static ContainerCommandRequestProto getUpdateContainerRequest(
+      String containerName, Map<String, String> metaData) throws IOException {
+    ContainerProtos.UpdateContainerRequestProto.Builder updateRequestBuilder =
+        ContainerProtos.UpdateContainerRequestProto.newBuilder();
+    ContainerProtos.ContainerData.Builder containerData = ContainerProtos
+        .ContainerData.newBuilder();
+    containerData.setName(containerName);
+    String[] keys = metaData.keySet().toArray(new String[]{});
+    for(int i=0; i<keys.length; i++) {
+      KeyValue.Builder kvBuilder = KeyValue.newBuilder();
+      kvBuilder.setKey(keys[i]);
+      kvBuilder.setValue(metaData.get(keys[i]));
+      containerData.addMetadata(i, kvBuilder.build());
+    }
+    Pipeline pipeline =
+        ContainerTestHelper.createSingleNodePipeline(containerName);
+    updateRequestBuilder.setPipeline(pipeline.getProtobufMessage());
+    updateRequestBuilder.setContainerData(containerData.build());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.UpdateContainer);
+    request.setUpdateContainer(updateRequestBuilder.build());
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Returns a create container response for test purposes. There are a bunch
+   * of tests where we need to just send a request and get a reply.
+   *
+   * @return ContainerCommandResponseProto.
+   */
+  public static ContainerCommandResponseProto
+      getCreateContainerResponse(ContainerCommandRequestProto request) {
+    ContainerProtos.CreateContainerResponseProto.Builder createResponse =
+        ContainerProtos.CreateContainerResponseProto.newBuilder();
+
+    ContainerCommandResponseProto.Builder response =
+        ContainerCommandResponseProto.newBuilder();
+    response.setCmdType(ContainerProtos.Type.CreateContainer);
+    response.setTraceID(request.getTraceID());
+    response.setCreateContainer(createResponse.build());
+    response.setResult(ContainerProtos.Result.SUCCESS);
+    return response.build();
+  }
+
+  /**
+   * Returns the PutKeyRequest for test purpose.
+   *
+   * @param writeRequest - Write Chunk Request.
+   * @return - Request
+   */
+  public static ContainerCommandRequestProto getPutKeyRequest(
+      ContainerProtos.WriteChunkRequestProto writeRequest) {
+    LOG.trace("putKey: {} to pipeline={}",
+        writeRequest.getKeyName(), writeRequest.getPipeline());
+
+    Pipeline pipeline = Pipeline.getFromProtoBuf(writeRequest.getPipeline());
+    ContainerProtos.PutKeyRequestProto.Builder putRequest =
+        ContainerProtos.PutKeyRequestProto.newBuilder();
+
+    putRequest.setPipeline(writeRequest.getPipeline());
+    KeyData keyData = new KeyData(writeRequest.getPipeline().getContainerName(),
+        writeRequest.getKeyName());
+    List<ContainerProtos.ChunkInfo> newList = new LinkedList<>();
+    newList.add(writeRequest.getChunkData());
+    keyData.setChunks(newList);
+    putRequest.setKeyData(keyData.getProtoBufMessage());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.PutKey);
+    request.setPutKey(putRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Gets a GetKeyRequest for test purpose.
+   *
+   * @param putKeyRequest - putKeyRequest.
+   * @return - Request
+   */
+  public static ContainerCommandRequestProto getKeyRequest(
+      ContainerProtos.PutKeyRequestProto putKeyRequest) {
+    LOG.trace("getKey: name={} from pipeline={}",
+        putKeyRequest.getKeyData().getName(), putKeyRequest.getPipeline());
+    Pipeline pipeline = Pipeline.getFromProtoBuf(putKeyRequest.getPipeline());
+
+    ContainerProtos.GetKeyRequestProto.Builder getRequest =
+        ContainerProtos.GetKeyRequestProto.newBuilder();
+    ContainerProtos.KeyData.Builder keyData = ContainerProtos.KeyData
+        .newBuilder();
+    keyData.setContainerName(putKeyRequest.getPipeline().getContainerName());
+    keyData.setName(putKeyRequest.getKeyData().getName());
+    getRequest.setKeyData(keyData);
+    getRequest.setPipeline(putKeyRequest.getPipeline());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.GetKey);
+    request.setGetKey(getRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Verify the response against the request.
+   *
+   * @param request - Request
+   * @param response - Response
+   */
+  public static void verifyGetKey(ContainerCommandRequestProto request,
+      ContainerCommandResponseProto response) {
+    Assert.assertEquals(request.getTraceID(), response.getTraceID());
+    Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+    ContainerProtos.PutKeyRequestProto putKey = request.getPutKey();
+    ContainerProtos.GetKeyRequestProto getKey = request.getGetKey();
+    Assert.assertEquals(putKey.getKeyData().getChunksCount(),
+        getKey.getKeyData().getChunksCount());
+  }
+
+  /**
+   * Returns a DeleteKey request for test purposes.
+   *
+   * @param putKeyRequest - putKeyRequest.
+   * @return - Request
+   */
+  public static ContainerCommandRequestProto getDeleteKeyRequest(
+      ContainerProtos.PutKeyRequestProto putKeyRequest) {
+    LOG.trace("deleteKey: name={} from pipeline={}",
+        putKeyRequest.getKeyData().getName(), putKeyRequest.getPipeline());
+    Pipeline pipeline = Pipeline.getFromProtoBuf(putKeyRequest.getPipeline());
+    ContainerProtos.DeleteKeyRequestProto.Builder delRequest =
+        ContainerProtos.DeleteKeyRequestProto.newBuilder();
+    delRequest.setPipeline(putKeyRequest.getPipeline());
+    delRequest.setName(putKeyRequest.getKeyData().getName());
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.DeleteKey);
+    request.setDeleteKey(delRequest);
+    request.setTraceID(UUID.randomUUID().toString());
+    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    return request.build();
+  }
+
+  /**
+   * Returns a close container request.
+   * @param pipeline - pipeline
+   * @return ContainerCommandRequestProto.
+   */
+  public static ContainerCommandRequestProto getCloseContainer(
+      Pipeline pipeline) {
+    Preconditions.checkNotNull(pipeline);
+    ContainerProtos.CloseContainerRequestProto closeRequest =
+        ContainerProtos.CloseContainerRequestProto.newBuilder().setPipeline(
+            pipeline.getProtobufMessage()).build();
+    ContainerProtos.ContainerCommandRequestProto cmd =
+        ContainerCommandRequestProto.newBuilder().setCmdType(ContainerProtos
+            .Type.CloseContainer).setCloseContainer(closeRequest)
+            .setTraceID(UUID.randomUUID().toString())
+            .setDatanodeUuid(pipeline.getLeader().getUuidString())
+            .build();
+
+    return cmd;
+  }
+
+  /**
+   * Returns a simple request without traceId.
+   * @param pipeline - pipeline
+   * @return ContainerCommandRequestProto without traceId.
+   */
+  public static ContainerCommandRequestProto getRequestWithoutTraceId(
+          Pipeline pipeline) {
+    Preconditions.checkNotNull(pipeline);
+    ContainerProtos.CloseContainerRequestProto closeRequest =
+            ContainerProtos.CloseContainerRequestProto.newBuilder().setPipeline(
+                    pipeline.getProtobufMessage()).build();
+    ContainerProtos.ContainerCommandRequestProto cmd =
+            ContainerCommandRequestProto.newBuilder().setCmdType(ContainerProtos
+                    .Type.CloseContainer).setCloseContainer(closeRequest)
+                    .setDatanodeUuid(
+                        pipeline.getLeader().getUuidString())
+                    .build();
+    return cmd;
+  }
+
+  /**
+   * Returns a delete container request.
+   * @param pipeline - pipeline
+   * @return ContainerCommandRequestProto.
+   */
+  public static ContainerCommandRequestProto getDeleteContainer(
+      Pipeline pipeline, boolean forceDelete) {
+    Preconditions.checkNotNull(pipeline);
+    ContainerProtos.DeleteContainerRequestProto deleteRequest =
+        ContainerProtos.DeleteContainerRequestProto.newBuilder().setName(
+            pipeline.getContainerName()).setPipeline(
+            pipeline.getProtobufMessage()).setForceDelete(forceDelete).build();
+    return ContainerCommandRequestProto.newBuilder()
+        .setCmdType(ContainerProtos.Type.DeleteContainer)
+        .setDeleteContainer(deleteRequest)
+        .setTraceID(UUID.randomUUID().toString())
+        .setDatanodeUuid(pipeline.getLeader().getUuidString())
+        .build();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
new file mode 100644
index 0000000..0f8c457
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
@@ -0,0 +1,398 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.testutils.BlockDeletingServiceTestImpl;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
+import org.apache.hadoop.ozone.container.common.impl.RandomContainerDeletionChoosingPolicy;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.statemachine.background.BlockDeletingService;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.utils.MetadataStore;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.BeforeClass;
+import org.junit.Before;
+import org.junit.After;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_SERVICE_INTERVAL;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL;
+import static org.apache.hadoop.ozone.container
+    .ContainerTestHelper.createSingleNodePipeline;
+
+/**
+ * Tests to test block deleting service.
+ */
+public class TestBlockDeletingService {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestBlockDeletingService.class);
+
+  private static File testRoot;
+  private static File containersDir;
+  private static File chunksDir;
+
+  @BeforeClass
+  public static void init() {
+    testRoot = GenericTestUtils
+        .getTestDir(TestBlockDeletingService.class.getSimpleName());
+    chunksDir = new File(testRoot, "chunks");
+    containersDir = new File(testRoot, "containers");
+  }
+
+  @Before
+  public void setup() throws IOException {
+    if (chunksDir.exists()) {
+      FileUtils.deleteDirectory(chunksDir);
+    }
+  }
+
+  @After
+  public void cleanup() throws IOException {
+    FileUtils.deleteDirectory(chunksDir);
+    FileUtils.deleteDirectory(containersDir);
+    FileUtils.deleteDirectory(testRoot);
+  }
+
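+  /**
+   * Creates a ContainerManager backed by a fresh local containers directory
+   * and configured with the random deletion choosing policy.
+   */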
+  private ContainerManager createContainerManager(Configuration conf)
+      throws Exception {
+    // use random container choosing policy for testing
+    conf.set(ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY,
+        RandomContainerDeletionChoosingPolicy.class.getName());
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        containersDir.getAbsolutePath());
+    if (containersDir.exists()) {
+      FileUtils.deleteDirectory(containersDir);
+    }
+    ContainerManager containerManager = new ContainerManagerImpl();
+    List<StorageLocation> pathLists = new LinkedList<>();
+    pathLists.add(StorageLocation.parse(containersDir.getAbsolutePath()));
+    containerManager.init(conf, pathLists, TestUtils.getDatanodeDetails());
+    return containerManager;
+  }
+
+  /**
+   * A helper method to create some blocks and put them under deletion
+   * state for testing. This method directly updates container.db and
+   * creates some fake chunk files on disk.
+   */
+  private void createToDeleteBlocks(ContainerManager mgr,
+      Configuration conf, int numOfContainers, int numOfBlocksPerContainer,
+      int numOfChunksPerBlock, File chunkDir) throws IOException {
+    for (int x = 0; x < numOfContainers; x++) {
+      String containerName = OzoneUtils.getRequestID();
+      ContainerData data = new ContainerData(containerName, new Long(x), conf);
+      mgr.createContainer(createSingleNodePipeline(containerName), data);
+      data = mgr.readContainer(containerName);
+      MetadataStore metadata = KeyUtils.getDB(data, conf);
+      for (int j = 0; j<numOfBlocksPerContainer; j++) {
+        String blockName = containerName + "b" + j;
+        String deleteStateName = OzoneConsts.DELETING_KEY_PREFIX + blockName;
+        KeyData kd = new KeyData(containerName, deleteStateName);
+        List<ContainerProtos.ChunkInfo> chunks = Lists.newArrayList();
+        for (int k = 0; k<numOfChunksPerBlock; k++) {
+          // offset doesn't matter here
+          String chunkName = blockName + "_chunk_" + k;
+          File chunk = new File(chunkDir, chunkName);
+          FileUtils.writeStringToFile(chunk, "a chunk",
+              Charset.defaultCharset());
+          LOG.info("Creating file {}", chunk.getAbsolutePath());
+          // make sure file exists
+          Assert.assertTrue(chunk.isFile() && chunk.exists());
+          ContainerProtos.ChunkInfo info =
+              ContainerProtos.ChunkInfo.newBuilder()
+                  .setChunkName(chunk.getAbsolutePath())
+                  .setLen(0)
+                  .setOffset(0)
+                  .setChecksum("")
+                  .build();
+          chunks.add(info);
+        }
+        kd.setChunks(chunks);
+        metadata.put(DFSUtil.string2Bytes(deleteStateName),
+            kd.getProtoBufMessage().toByteArray());
+      }
+    }
+  }
+
+  /**
+   * Runs the service's deleting tasks and waits until the given number of
+   * runs has been processed.
+   */
+  private void deleteAndWait(BlockDeletingServiceTestImpl service,
+      int timesOfProcessed) throws TimeoutException, InterruptedException {
+    service.runDeletingTasks();
+    GenericTestUtils.waitFor(()
+        -> service.getTimesOfProcessed() == timesOfProcessed, 100, 3000);
+  }
+
+  /**
+   * Gets the number of blocks pending deletion from the DB;
+   * note this info is parsed from container.db.
+   */
+  private int getUnderDeletionBlocksCount(MetadataStore meta)
+      throws IOException {
+    List<Map.Entry<byte[], byte[]>> underDeletionBlocks =
+        meta.getRangeKVs(null, 100, new MetadataKeyFilters.KeyPrefixFilter(
+            OzoneConsts.DELETING_KEY_PREFIX));
+    return underDeletionBlocks.size();
+  }
+
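+  /**
+   * With a per-container limit of 2 blocks per run, the 3 pending blocks
+   * should be fully deleted after two runs of the service.
+   */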
+  @Test
+  public void testBlockDeletion() throws Exception {
+    Configuration conf = new OzoneConfiguration();
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 2);
+    ContainerManager containerManager = createContainerManager(conf);
+    createToDeleteBlocks(containerManager, conf, 1, 3, 1, chunksDir);
+
+    BlockDeletingServiceTestImpl svc =
+        new BlockDeletingServiceTestImpl(containerManager, 1000, conf);
+    svc.start();
+    GenericTestUtils.waitFor(() -> svc.isStarted(), 100, 3000);
+
+    // Ensure 1 container was created
+    List<ContainerData> containerData = Lists.newArrayList();
+    containerManager.listContainer(null, 1, "", containerData);
+    Assert.assertEquals(1, containerData.size());
+    MetadataStore meta = KeyUtils.getDB(containerData.get(0), conf);
+
+    // Ensure there are 3 blocks under deletion
+    Assert.assertEquals(3, getUnderDeletionBlocksCount(meta));
+
+    // Each interval deletes at most 1 container * 2 blocks = 2 blocks
+    deleteAndWait(svc, 1);
+    Assert.assertEquals(1, getUnderDeletionBlocksCount(meta));
+
+    deleteAndWait(svc, 2);
+    Assert.assertEquals(0, getUnderDeletionBlocksCount(meta));
+
+    deleteAndWait(svc, 3);
+    Assert.assertEquals(0, getUnderDeletionBlocksCount(meta));
+
+    svc.shutdown();
+    shutdownContainerManager(containerManager);
+  }
+
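+  /**
+   * Starts the service, runs some deleting tasks, then verifies that
+   * shutdown stops all worker threads.
+   */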
+  @Test
+  public void testShutdownService() throws Exception {
+    Configuration conf = new OzoneConfiguration();
+    conf.setTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL, 500,
+        TimeUnit.MILLISECONDS);
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 10);
+    ContainerManager containerManager = createContainerManager(conf);
+    // Create 1 container with 100 blocks
+    createToDeleteBlocks(containerManager, conf, 1, 100, 1, chunksDir);
+
+    BlockDeletingServiceTestImpl service =
+        new BlockDeletingServiceTestImpl(containerManager, 1000, conf);
+    service.start();
+    GenericTestUtils.waitFor(() -> service.isStarted(), 100, 3000);
+
+    // Run some deleting tasks and verify there are threads running
+    service.runDeletingTasks();
+    GenericTestUtils.waitFor(() -> service.getThreadCount() > 0, 100, 1000);
+
+    // Wait for 1 or 2 intervals
+    Thread.sleep(1000);
+
+    // Shutdown service and verify all threads are stopped
+    service.shutdown();
+    GenericTestUtils.waitFor(() -> service.getThreadCount() == 0, 100, 1000);
+    shutdownContainerManager(containerManager);
+  }
+
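+  /**
+   * A 1ms task timeout should trigger the timed-out warning, while a zero
+   * timeout (no limit) should delete all blocks without that warning.
+   */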
+  @Test
+  public void testBlockDeletionTimeout() throws Exception {
+    Configuration conf = new OzoneConfiguration();
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 2);
+    ContainerManager containerManager = createContainerManager(conf);
+    createToDeleteBlocks(containerManager, conf, 1, 3, 1, chunksDir);
+
+    // set timeout value as 1ms to trigger timeout behavior
+    long timeout  = 1;
+    BlockDeletingService svc =
+        new BlockDeletingService(containerManager, 1000, timeout, conf);
+    svc.start();
+
+    LogCapturer log = LogCapturer.captureLogs(BackgroundService.LOG);
+    GenericTestUtils.waitFor(() -> {
+      if(log.getOutput().contains(
+          "Background task executes timed out, retrying in next interval")) {
+        log.stopCapturing();
+        return true;
+      }
+
+      return false;
+    }, 1000, 100000);
+
+    log.stopCapturing();
+    svc.shutdown();
+
+    // test for normal case that doesn't have timeout limitation
+    timeout  = 0;
+    createToDeleteBlocks(containerManager, conf, 1, 3, 1, chunksDir);
+    svc =  new BlockDeletingService(containerManager, 1000, timeout, conf);
+    svc.start();
+
+    // get container meta data
+    List<ContainerData> containerData = Lists.newArrayList();
+    containerManager.listContainer(null, 1, "", containerData);
+    MetadataStore meta = KeyUtils.getDB(containerData.get(0), conf);
+
+    LogCapturer newLog = LogCapturer.captureLogs(BackgroundService.LOG);
+    GenericTestUtils.waitFor(() -> {
+      try {
+        if (getUnderDeletionBlocksCount(meta) == 0) {
+          return true;
+        }
+      } catch (IOException ignored) {
+      }
+      return false;
+    }, 1000, 100000);
+    newLog.stopCapturing();
+
+    // The blocks should be deleted successfully and we should not see the
+    // timed-out warning in the log.
+    Assert.assertTrue(!newLog.getOutput().contains(
+        "Background task executes timed out, retrying in next interval"));
+    svc.shutdown();
+    shutdownContainerManager(containerManager);
+  }
+
+  @Test(timeout = 30000)
+  public void testContainerThrottle() throws Exception {
+    // Properties :
+    //  - Number of containers : 2
+    //  - Number of blocks per container : 1
+    //  - Number of chunks per block : 10
+    //  - Container limit per interval : 1
+    //  - Block limit per container : 1
+    //
+    // Each time only 1 container can be processed, so each time
+    // 1 block from 1 container can be deleted.
+    Configuration conf = new OzoneConfiguration();
+    // Process 1 container per interval
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 1);
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 1);
+    ContainerManager containerManager = createContainerManager(conf);
+    createToDeleteBlocks(containerManager, conf, 2, 1, 10, chunksDir);
+
+    BlockDeletingServiceTestImpl service =
+        new BlockDeletingServiceTestImpl(containerManager, 1000, conf);
+    service.start();
+
+    try {
+      GenericTestUtils.waitFor(() -> service.isStarted(), 100, 3000);
+      // 1st interval processes 1 container: its 1 block (10 chunks) is
+      // deleted, leaving the other container's 10 chunk files on disk.
+      deleteAndWait(service, 1);
+      Assert.assertEquals(10, chunksDir.listFiles().length);
+    } finally {
+      service.shutdown();
+      shutdownContainerManager(containerManager);
+    }
+  }
+
+
+  @Test(timeout = 30000)
+  public void testBlockThrottle() throws Exception {
+    // Properties :
+    //  - Number of containers : 5
+    //  - Number of blocks per container : 3
+    //  - Number of chunks per block : 1
+    //  - Container limit per interval : 10
+    //  - Block limit per container : 2
+    //
+    // Each time containers can be all scanned, but only 2 blocks
+    // per container can be actually deleted. So it requires 2 waves
+    // to cleanup all blocks.
+    Configuration conf = new OzoneConfiguration();
+    conf.setInt(OZONE_BLOCK_DELETING_CONTAINER_LIMIT_PER_INTERVAL, 10);
+    conf.setInt(OZONE_BLOCK_DELETING_LIMIT_PER_CONTAINER, 2);
+    ContainerManager containerManager = createContainerManager(conf);
+    createToDeleteBlocks(containerManager, conf, 5, 3, 1, chunksDir);
+
+    // Make sure chunks are created
+    Assert.assertEquals(15, chunksDir.listFiles().length);
+
+    BlockDeletingServiceTestImpl service =
+        new BlockDeletingServiceTestImpl(containerManager, 1000, conf);
+    service.start();
+
+    try {
+      GenericTestUtils.waitFor(() -> service.isStarted(), 100, 3000);
+      // Total blocks = 3 * 5 = 15
+      // block per task = 2
+      // number of containers = 5
+      // each interval will delete at most 5 * 2 = 10 blocks
+      deleteAndWait(service, 1);
+      Assert.assertEquals(5, chunksDir.listFiles().length);
+
+      // There are only 5 blocks left to delete
+      deleteAndWait(service, 2);
+      Assert.assertEquals(0, chunksDir.listFiles().length);
+    } finally {
+      service.shutdown();
+      shutdownContainerManager(containerManager);
+    }
+  }
+
+  private void shutdownContainerManager(ContainerManager mgr)
+      throws IOException {
+    mgr.writeLock();
+    try {
+      mgr.shutdown();
+    } finally {
+      mgr.writeUnlock();
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java
new file mode 100644
index 0000000..893f2f6
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.impl;
+
+import static org.apache.hadoop.ozone.container.ContainerTestHelper.createSingleNodePipeline;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.utils.MetadataStore;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * The class for testing container deletion choosing policy.
+ */
+public class TestContainerDeletionChoosingPolicy {
+  private static String path;
+  private static ContainerManagerImpl containerManager;
+  private static OzoneConfiguration conf;
+
+  @Before
+  public void init() throws Throwable {
+    conf = new OzoneConfiguration();
+    path = GenericTestUtils
+        .getTempPath(TestContainerDeletionChoosingPolicy.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+  }
+
+  @After
+  public void shutdown() throws IOException {
+    FileUtils.deleteDirectory(new File(path));
+
+    containerManager.writeLock();
+    try{
+      containerManager.shutdown();
+    } finally {
+      containerManager.writeUnlock();
+    }
+  }
+
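+  /**
+   * Creates a set of containers and verifies that two consecutive choices
+   * made by the random policy return differently ordered results.
+   */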
+  @Test
+  public void testRandomChoosingPolicy() throws IOException {
+    File containerDir = new File(path);
+    if (containerDir.exists()) {
+      FileUtils.deleteDirectory(new File(path));
+    }
+    Assert.assertTrue(containerDir.mkdirs());
+
+    conf.set(ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY,
+        RandomContainerDeletionChoosingPolicy.class.getName());
+    List<StorageLocation> pathLists = new LinkedList<>();
+    pathLists.add(StorageLocation.parse(containerDir.getAbsolutePath()));
+    containerManager = new ContainerManagerImpl();
+    containerManager.init(conf, pathLists, TestUtils.getDatanodeDetails());
+
+    int numContainers = 10;
+    for (int i = 0; i < numContainers; i++) {
+      String containerName = OzoneUtils.getRequestID();
+      ContainerData data = new ContainerData(containerName, new Long(i), conf);
+      containerManager.createContainer(createSingleNodePipeline(containerName),
+          data);
+      Assert.assertTrue(
+          containerManager.getContainerMap().containsKey(containerName));
+    }
+
+    List<ContainerData> result0 = containerManager
+        .chooseContainerForBlockDeletion(5);
+    Assert.assertEquals(5, result0.size());
+
+    // test random choosing
+    List<ContainerData> result1 = containerManager
+        .chooseContainerForBlockDeletion(numContainers);
+    List<ContainerData> result2 = containerManager
+        .chooseContainerForBlockDeletion(numContainers);
+
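+    // Assuming the policy shuffles the candidate list uniformly, two
+    // identical orderings of 10 containers occur with probability 1/10!,
+    // so this check is effectively never flaky.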
+    boolean hasShuffled = false;
+    for (int i = 0; i < numContainers; i++) {
+      if (!result1.get(i).getContainerName()
+          .equals(result2.get(i).getContainerName())) {
+        hasShuffled = true;
+        break;
+      }
+    }
+    Assert.assertTrue("Chosen container results were same", hasShuffled);
+  }
+
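+  /**
+   * Verifies that the TopN policy returns containers ordered by their
+   * pending-deletion block count and skips containers that have none.
+   */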
+  @Test
+  public void testTopNOrderedChoosingPolicy() throws IOException {
+    File containerDir = new File(path);
+    if (containerDir.exists()) {
+      FileUtils.deleteDirectory(new File(path));
+    }
+    Assert.assertTrue(containerDir.mkdirs());
+
+    conf.set(ScmConfigKeys.OZONE_SCM_CONTAINER_DELETION_CHOOSING_POLICY,
+        TopNOrderedContainerDeletionChoosingPolicy.class.getName());
+    List<StorageLocation> pathLists = new LinkedList<>();
+    pathLists.add(StorageLocation.parse(containerDir.getAbsolutePath()));
+    containerManager = new ContainerManagerImpl();
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+    containerManager.init(conf, pathLists, datanodeDetails);
+
+    int numContainers = 10;
+    Random random = new Random();
+    Map<String, Integer> name2Count = new HashMap<>();
+    // create [numContainers + 1] containers
+    for (int i = 0; i <= numContainers; i++) {
+      String containerName = OzoneUtils.getRequestID();
+      ContainerData data = new ContainerData(containerName, new Long(i), conf);
+      containerManager.createContainer(createSingleNodePipeline(containerName),
+          data);
+      Assert.assertTrue(
+          containerManager.getContainerMap().containsKey(containerName));
+
+      // don't create deletion blocks in the last container.
+      if (i == numContainers) {
+        break;
+      }
+
+      // create random number of deletion blocks and write to container db
+      int deletionBlocks = random.nextInt(numContainers) + 1;
+      // record <ContainerName, DeletionCount> value
+      name2Count.put(containerName, deletionBlocks);
+      for (int j = 0; j <= deletionBlocks; j++) {
+        MetadataStore metadata = KeyUtils.getDB(data, conf);
+        String blk = "blk" + i + "-" + j;
+        byte[] blkBytes = DFSUtil.string2Bytes(blk);
+        metadata.put(
+            DFSUtil.string2Bytes(OzoneConsts.DELETING_KEY_PREFIX + blk),
+            blkBytes);
+      }
+    }
+
+    containerManager.writeLock();
+    containerManager.shutdown();
+    containerManager.writeUnlock();
+    containerManager.init(conf, pathLists, datanodeDetails);
+
+    List<ContainerData> result0 = containerManager
+        .chooseContainerForBlockDeletion(5);
+    Assert.assertEquals(5, result0.size());
+
+    List<ContainerData> result1 = containerManager
+        .chooseContainerForBlockDeletion(numContainers + 1);
+    // the empty deletion blocks container should not be chosen
+    Assert.assertEquals(numContainers, result1.size());
+
+    // verify the order of return list
+    int lastCount = Integer.MAX_VALUE;
+    for (ContainerData data : result1) {
+      int currentCount = name2Count.remove(data.getContainerName());
+      // the previous count should not be smaller than the next one
+      Assert.assertTrue(currentCount > 0 && currentCount <= lastCount);
+      lastCount = currentCount;
+    }
+    // ensure all the container data are compared
+    Assert.assertEquals(0, name2Count.size());
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
new file mode 100644
index 0000000..fae4c49
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
@@ -0,0 +1,905 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.impl;
+
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.utils.MetadataStore;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.Timeout;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.ArrayList;
+import java.util.UUID;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX;
+import static org.apache.hadoop.ozone.container.ContainerTestHelper
+    .createSingleNodePipeline;
+import static org.apache.hadoop.ozone.container.ContainerTestHelper.getChunk;
+import static org.apache.hadoop.ozone.container.ContainerTestHelper.getData;
+import static org.apache.hadoop.ozone.container.ContainerTestHelper
+    .setDataChecksum;
+import static org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .Stage.COMBINED;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Simple tests to verify that container persistence works as expected.
+ */
+public class TestContainerPersistence {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static Logger log =
+      LoggerFactory.getLogger(TestContainerPersistence.class);
+  private static String path;
+  private static ContainerManagerImpl containerManager;
+  private static ChunkManagerImpl chunkManager;
+  private static KeyManagerImpl keyManager;
+  private static OzoneConfiguration conf;
+  private static List<StorageLocation> pathLists = new LinkedList<>();
+  private Long containerID = 8888L;
+
+  @BeforeClass
+  public static void init() throws Throwable {
+    conf = new OzoneConfiguration();
+    path = GenericTestUtils
+        .getTempPath(TestContainerPersistence.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+
+    File containerDir = new File(path);
+    if (containerDir.exists()) {
+      FileUtils.deleteDirectory(new File(path));
+    }
+    Assert.assertTrue(containerDir.mkdirs());
+
+    containerManager = new ContainerManagerImpl();
+    chunkManager = new ChunkManagerImpl(containerManager);
+    containerManager.setChunkManager(chunkManager);
+    keyManager = new KeyManagerImpl(containerManager, conf);
+    containerManager.setKeyManager(keyManager);
+
+  }
+
+  @AfterClass
+  public static void shutdown() throws IOException {
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Before
+  public void setupPaths() throws IOException {
+    if (!new File(path).exists() && !new File(path).mkdirs()) {
+      throw new IOException("Unable to create paths. " + path);
+    }
+    StorageLocation loc = StorageLocation.parse(
+        Paths.get(path).resolve(CONTAINER_ROOT_PREFIX).toString());
+
+    pathLists.clear();
+    containerManager.getContainerMap().clear();
+
+    if (!new File(loc.getNormalizedUri()).mkdirs()) {
+      throw new IOException("unable to create paths. " +
+          loc.getNormalizedUri());
+    }
+    pathLists.add(loc);
+
+    for (String dir : conf.getStrings(DFS_DATANODE_DATA_DIR_KEY)) {
+      StorageLocation location = StorageLocation.parse(dir);
+      FileUtils.forceMkdir(new File(location.getNormalizedUri()));
+    }
+
+    containerManager.init(conf, pathLists, TestUtils.getDatanodeDetails());
+  }
+
+  @After
+  public void cleanupDir() throws IOException {
+    // Shutdown containerManager
+    containerManager.writeLock();
+    try {
+      containerManager.shutdown();
+    } finally {
+      containerManager.writeUnlock();
+    }
+
+    // Clean up SCM metadata
+    log.info("Deleting {}", path);
+    FileUtils.deleteDirectory(new File(path));
+
+    // Clean up SCM datanode container metadata/data
+    for (String dir : conf.getStrings(DFS_DATANODE_DATA_DIR_KEY)) {
+      StorageLocation location = StorageLocation.parse(dir);
+      FileUtils.deleteDirectory(new File(location.getNormalizedUri()));
+    }
+  }
+
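+  /**
+   * Creates a container and verifies that its on-disk path and metadata
+   * store exist.
+   */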
+  @Test
+  public void testCreateContainer() throws Exception {
+
+    String containerName = OzoneUtils.getRequestID();
+    ContainerData data = new ContainerData(containerName, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+    containerManager.createContainer(createSingleNodePipeline(containerName),
+        data);
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName));
+    ContainerStatus status = containerManager
+        .getContainerMap().get(containerName);
+
+    Assert.assertNotNull(status.getContainer());
+    Assert.assertNotNull(status.getContainer().getContainerPath());
+    Assert.assertNotNull(status.getContainer().getDBPath());
+
+
+    Assert.assertTrue(new File(status.getContainer().getContainerPath())
+        .exists());
+
+    Path meta = Paths.get(status.getContainer().getDBPath()).getParent();
+    Assert.assertTrue(meta != null && Files.exists(meta));
+
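+    // The container's metadata DB should be readable right after creation.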
+    MetadataStore store = null;
+    try {
+      store = KeyUtils.getDB(status.getContainer(), conf);
+      Assert.assertNotNull(store);
+    } finally {
+      if (store != null) {
+        store.close();
+      }
+    }
+  }
+
+  @Test
+  public void testCreateDuplicateContainer() throws Exception {
+    String containerName = OzoneUtils.getRequestID();
+
+    ContainerData data = new ContainerData(containerName, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+    containerManager.createContainer(createSingleNodePipeline(containerName),
+        data);
+    try {
+      containerManager.createContainer(createSingleNodePipeline(
+          containerName), data);
+      fail("Expected Exception not thrown.");
+    } catch (IOException ex) {
+      Assert.assertNotNull(ex);
+    }
+  }
+
+  @Test
+  public void testDeleteContainer() throws Exception {
+    String containerName1 = OzoneUtils.getRequestID();
+    String containerName2 = OzoneUtils.getRequestID();
+
+
+    ContainerData data = new ContainerData(containerName1, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+    containerManager.createContainer(createSingleNodePipeline(containerName1),
+        data);
+    containerManager.closeContainer(containerName1);
+
+    data = new ContainerData(containerName2, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+    containerManager.createContainer(createSingleNodePipeline(containerName2),
+        data);
+    containerManager.closeContainer(containerName2);
+
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName1));
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName2));
+
+    containerManager.deleteContainer(createSingleNodePipeline(containerName1),
+        containerName1, false);
+    Assert.assertFalse(containerManager.getContainerMap()
+        .containsKey(containerName1));
+
+    // Let us make sure that we are able to re-use a container name after
+    // delete.
+
+    data = new ContainerData(containerName1, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+    containerManager.createContainer(createSingleNodePipeline(containerName1),
+        data);
+    containerManager.closeContainer(containerName1);
+
+    // Assert we still have both containers.
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName1));
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName2));
+
+    // Add some key to a container and then delete.
+    // Delete should fail because the container is no longer empty.
+    KeyData someKey = new KeyData(containerName1, "someKey");
+    someKey.setChunks(new LinkedList<ContainerProtos.ChunkInfo>());
+    keyManager.putKey(
+        createSingleNodePipeline(containerName1),
+        someKey);
+
+    exception.expect(StorageContainerException.class);
+    exception.expectMessage(
+        "Container cannot be deleted because it is not empty.");
+    containerManager.deleteContainer(
+        createSingleNodePipeline(containerName1),
+        containerName1, false);
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName1));
+  }
+
+  @Test
+  public void testGetContainerReports() throws Exception{
+    final int count = 10;
+    List<String> containerNames = new ArrayList<String>();
+
+    for (int i = 0; i < count; i++) {
+      String containerName = OzoneUtils.getRequestID();
+      ContainerData data = new ContainerData(containerName, containerID++,
+          conf);
+      containerManager.createContainer(createSingleNodePipeline(containerName),
+          data);
+
+      // Close every third container and record its name.
+      if (i % 3 == 0) {
+        containerManager.closeContainer(containerName);
+        containerNames.add(containerName);
+      }
+    }
+
+    // The container report only returns reports of closed containers.
+    List<ContainerData> reports = containerManager.getContainerReports();
+    Assert.assertEquals(4, reports.size());
+    for(ContainerData report : reports) {
+      String actualName = report.getContainerName();
+      Assert.assertTrue(containerNames.remove(actualName));
+    }
+    Assert.assertTrue(containerNames.isEmpty());
+  }
+
+  /**
+   * This test creates 50 containers, reads them back 5 at a time, and
+   * verifies that all containers are returned.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testListContainer() throws IOException {
+    final int count = 50;
+    final int step = 5;
+
+    Map<String, ContainerData> testMap = new HashMap<>();
+    for (int x = 0; x < count; x++) {
+      String containerName = OzoneUtils.getRequestID();
+      ContainerData data = new ContainerData(containerName, containerID++,
+          conf);
+      data.addMetadata("VOLUME", "shire");
+      data.addMetadata("owner", "bilbo");
+      containerManager.createContainer(createSingleNodePipeline(containerName),
+          data);
+      testMap.put(containerName, data);
+    }
+
+    int counter = 0;
+    String prevKey = "";
+    List<ContainerData> results = new LinkedList<>();
+    while (counter < count) {
+      containerManager.listContainer(null, step, prevKey, results);
+      for (int y = 0; y < results.size(); y++) {
+        testMap.remove(results.get(y).getContainerName());
+      }
+      counter += step;
+      String nextKey = results.get(results.size() - 1).getContainerName();
+
+      // Assert that containers are returned in sorted order.
+      Assert.assertTrue(prevKey.compareTo(nextKey) < 0);
+      prevKey = nextKey;
+      results.clear();
+    }
+    // Assert that we listed every container that we created.
+    Assert.assertTrue(testMap.isEmpty());
+  }
+
+  private ChunkInfo writeChunkHelper(String containerName, String keyName,
+      Pipeline pipeline) throws IOException,
+      NoSuchAlgorithmException {
+    final int datalen = 1024;
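+    // Build a pipeline that carries this container name so the chunk write
+    // below is directed at it.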
+    Pipeline newPipeline =
+        new Pipeline(containerName, pipeline.getPipelineChannel());
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    if(!containerManager.getContainerMap()
+        .containsKey(containerName)) {
+      containerManager.createContainer(newPipeline, cData);
+    }
+    ChunkInfo info = getChunk(keyName, 0, 0, datalen);
+    byte[] data = getData(datalen);
+    setDataChecksum(info, data);
+    chunkManager.writeChunk(newPipeline, keyName, info, data, COMBINED);
+    return info;
+  }
+
+  /**
+   * Writes a single chunk.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testWriteChunk() throws IOException,
+      NoSuchAlgorithmException {
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    writeChunkHelper(containerName, keyName, pipeline);
+  }
+
+  /**
+   * Writes many chunks of the same key into different chunk files and
+   * verifies that the data is spread across many files.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testWriteReadManyChunks() throws IOException,
+      NoSuchAlgorithmException {
+    final int datalen = 1024;
+    final int chunkCount = 1024;
+
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    Map<String, ChunkInfo> fileHashMap = new HashMap<>();
+
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    containerManager.createContainer(pipeline, cData);
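+    // Write chunkCount chunks of the same key; the test expects each chunk
+    // in its own <key>.data.<index> file and remembers its checksum.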
+    for (int x = 0; x < chunkCount; x++) {
+      ChunkInfo info = getChunk(keyName, x, 0, datalen);
+      byte[] data = getData(datalen);
+      setDataChecksum(info, data);
+      chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+      String fileName = String.format("%s.data.%d", keyName, x);
+      fileHashMap.put(fileName, info);
+    }
+
+    ContainerData cNewData = containerManager.readContainer(containerName);
+    Assert.assertNotNull(cNewData);
+    Path dataDir = ContainerUtils.getDataDirectory(cNewData);
+
+    String globFormat = String.format("%s.data.*", keyName);
+    MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+
+    // Read chunk via file system and verify.
+    int count = 0;
+    try (DirectoryStream<Path> stream =
+             Files.newDirectoryStream(dataDir, globFormat)) {
+      for (Path fname : stream) {
+        sha.update(FileUtils.readFileToByteArray(fname.toFile()));
+        String val = Hex.encodeHexString(sha.digest());
+        Assert.assertEquals(fileHashMap.get(fname.getFileName().toString())
+                .getChecksum(), val);
+        count++;
+        sha.reset();
+      }
+      Assert.assertEquals(chunkCount, count);
+
+      // Read chunk via ReadChunk call.
+      sha.reset();
+      for (int x = 0; x < chunkCount; x++) {
+        String fileName = String.format("%s.data.%d", keyName, x);
+        ChunkInfo info = fileHashMap.get(fileName);
+        byte[] data = chunkManager.readChunk(pipeline, keyName, info);
+        sha.update(data);
+        Assert.assertEquals(Hex.encodeHexString(sha.digest()),
+            info.getChecksum());
+        sha.reset();
+      }
+    }
+  }
+
+  /**
+   * Tests a partial read within a single chunk.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testPartialRead() throws Exception {
+    final int datalen = 1024;
+    final int start = datalen/4;
+    final int length = datalen/2;
+
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    containerManager.createContainer(pipeline, cData);
+    ChunkInfo info = getChunk(keyName, 0, 0, datalen);
+    byte[] data = getData(datalen);
+    setDataChecksum(info, data);
+    chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+
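+    // First read the full chunk back, then read only the middle half of it.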
+    byte[] readData = chunkManager.readChunk(pipeline, keyName, info);
+    assertTrue(Arrays.equals(data, readData));
+
+    ChunkInfo info2 = getChunk(keyName, 0, start, length);
+    byte[] readData2 = chunkManager.readChunk(pipeline, keyName, info2);
+    assertEquals(length, readData2.length);
+    assertTrue(Arrays.equals(
+        Arrays.copyOfRange(data, start, start + length), readData2));
+  }
+
+  /**
+   * Writes a single chunk, tries to overwrite it without the overwrite flag,
+   * and then retries with the overwrite flag set.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testOverWrite() throws IOException,
+      NoSuchAlgorithmException {
+    final int datalen = 1024;
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    containerManager.createContainer(pipeline, cData);
+    ChunkInfo info = getChunk(keyName, 0, 0, datalen);
+    byte[] data = getData(datalen);
+    setDataChecksum(info, data);
+    chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
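+    // A second write of the same chunk without the overwrite flag must be
+    // rejected.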
+    try {
+      chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+      fail("Expected overwrite exception not thrown.");
+    } catch (IOException ex) {
+      Assert.assertTrue(ex.getCause().getMessage().contains(
+          "Rejecting write chunk request. OverWrite flag required"));
+    }
+
+    // With the overwrite flag it should work now.
+    info.addMetadata(OzoneConsts.CHUNK_OVERWRITE, "true");
+    chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+    long bytesUsed = containerManager.getBytesUsed(containerName);
+    Assert.assertEquals(datalen, bytesUsed);
+
+    long bytesWrite = containerManager.getWriteBytes(containerName);
+    Assert.assertEquals(datalen * 2, bytesWrite);
+  }
+
+  /**
+   * This test writes data as many small writes and then reads it all back
+   * in a single large read.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testMultipleWriteSingleRead() throws IOException,
+      NoSuchAlgorithmException {
+    final int datalen = 1024;
+    final int chunkCount = 1024;
+
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    containerManager.createContainer(pipeline, cData);
+    MessageDigest oldSha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
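+    // Accumulate a running digest of everything we write so that a single
+    // large read can be compared against it below.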
+    for (int x = 0; x < chunkCount; x++) {
+      // We are writing to the same chunk file but at different offsets.
+      long offset = x * datalen;
+      ChunkInfo info = getChunk(keyName, 0, offset, datalen);
+      byte[] data = getData(datalen);
+      oldSha.update(data);
+      setDataChecksum(info, data);
+      chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+    }
+
+    // Request to read the whole data in a single go.
+    ChunkInfo largeChunk = getChunk(keyName, 0, 0, datalen * chunkCount);
+    byte[] newdata = chunkManager.readChunk(pipeline, keyName, largeChunk);
+    MessageDigest newSha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+    newSha.update(newdata);
+    Assert.assertEquals(Hex.encodeHexString(oldSha.digest()),
+        Hex.encodeHexString(newSha.digest()));
+  }
+
+  /**
+   * Writes a chunk and deletes it, re-reads to make sure it is gone.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testDeleteChunk() throws IOException,
+      NoSuchAlgorithmException {
+    final int datalen = 1024;
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+
+    ContainerData cData = new ContainerData(containerName, containerID++, conf);
+    cData.addMetadata("VOLUME", "shire");
+    cData.addMetadata("owner", "bilbo");
+    containerManager.createContainer(pipeline, cData);
+    ChunkInfo info = getChunk(keyName, 0, 0, datalen);
+    byte[] data = getData(datalen);
+    setDataChecksum(info, data);
+    chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+    chunkManager.deleteChunk(pipeline, keyName, info);
+    exception.expect(StorageContainerException.class);
+    exception.expectMessage("Unable to find the chunk file.");
+    chunkManager.readChunk(pipeline, keyName, info);
+  }
+
+  /**
+   * Tests a put key and read key.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testPutKey() throws IOException, NoSuchAlgorithmException {
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    ChunkInfo info = writeChunkHelper(containerName, keyName, pipeline);
+    KeyData keyData = new KeyData(containerName, keyName);
+    List<ContainerProtos.ChunkInfo> chunkList = new LinkedList<>();
+    chunkList.add(info.getProtoBufMessage());
+    keyData.setChunks(chunkList);
+    keyManager.putKey(pipeline, keyData);
+    KeyData readKeyData = keyManager.getKey(keyData);
+    ChunkInfo readChunk =
+        ChunkInfo.getFromProtoBuf(readKeyData.getChunks().get(0));
+    Assert.assertEquals(info.getChecksum(), readChunk.getChecksum());
+  }
+
+  /**
+   * Tests a put key and read key.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testPutKeyWithLotsOfChunks() throws IOException,
+      NoSuchAlgorithmException {
+    final int chunkCount = 2;
+    final int datalen = 1024;
+    long totalSize = 0L;
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    List<ChunkInfo> chunkList = new LinkedList<>();
+    ChunkInfo info = writeChunkHelper(containerName, keyName, pipeline);
+    totalSize += datalen;
+    chunkList.add(info);
+    for (int x = 1; x < chunkCount; x++) {
+      // With a hole in the front (before offset x * datalen).
+      info = getChunk(keyName, x, x * datalen, datalen);
+      byte[] data = getData(datalen);
+      setDataChecksum(info, data);
+      chunkManager.writeChunk(pipeline, keyName, info, data, COMBINED);
+      totalSize += datalen * (x + 1);
+      chunkList.add(info);
+    }
+
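+    // Verify the container-level I/O statistics kept by the container manager.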
+    long bytesUsed = containerManager.getBytesUsed(containerName);
+    Assert.assertEquals(totalSize, bytesUsed);
+    long writeBytes = containerManager.getWriteBytes(containerName);
+    Assert.assertEquals(chunkCount * datalen, writeBytes);
+    long readCount = containerManager.getReadCount(containerName);
+    Assert.assertEquals(0, readCount);
+    long writeCount = containerManager.getWriteCount(containerName);
+    Assert.assertEquals(chunkCount, writeCount);
+
+    KeyData keyData = new KeyData(containerName, keyName);
+    List<ContainerProtos.ChunkInfo> chunkProtoList = new LinkedList<>();
+    for (ChunkInfo i : chunkList) {
+      chunkProtoList.add(i.getProtoBufMessage());
+    }
+    keyData.setChunks(chunkProtoList);
+    keyManager.putKey(pipeline, keyData);
+    KeyData readKeyData = keyManager.getKey(keyData);
+    ChunkInfo lastChunk = chunkList.get(chunkList.size() - 1);
+    ChunkInfo readChunk =
+        ChunkInfo.getFromProtoBuf(readKeyData.getChunks().get(readKeyData
+            .getChunks().size() - 1));
+    Assert.assertEquals(lastChunk.getChecksum(), readChunk.getChecksum());
+  }
+
+  /**
+   * Deletes a key and tries to read it back.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testDeleteKey() throws IOException, NoSuchAlgorithmException {
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    ChunkInfo info = writeChunkHelper(containerName, keyName, pipeline);
+    KeyData keyData = new KeyData(containerName, keyName);
+    List<ContainerProtos.ChunkInfo> chunkList = new LinkedList<>();
+    chunkList.add(info.getProtoBufMessage());
+    keyData.setChunks(chunkList);
+    keyManager.putKey(pipeline, keyData);
+    keyManager.deleteKey(pipeline, keyName);
+    exception.expect(StorageContainerException.class);
+    exception.expectMessage("Unable to find the key.");
+    keyManager.getKey(keyData);
+  }
+
+  /**
+   * Tries to delete a key twice.
+   *
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testDeleteKeyTwice() throws IOException,
+      NoSuchAlgorithmException {
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    ChunkInfo info = writeChunkHelper(containerName, keyName, pipeline);
+    KeyData keyData = new KeyData(containerName, keyName);
+    List<ContainerProtos.ChunkInfo> chunkList = new LinkedList<>();
+    chunkList.add(info.getProtoBufMessage());
+    keyData.setChunks(chunkList);
+    keyManager.putKey(pipeline, keyData);
+    keyManager.deleteKey(pipeline, keyName);
+    exception.expect(StorageContainerException.class);
+    exception.expectMessage("Unable to find the key.");
+    keyManager.deleteKey(pipeline, keyName);
+  }
+
+  /**
+   * Tries to update an existing and a non-existing container.
+   * Verifies that both the container map and the persistent data are updated.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testUpdateContainer() throws IOException {
+    String containerName = OzoneUtils.getRequestID();
+    ContainerData data = new ContainerData(containerName, containerID++, conf);
+    data.addMetadata("VOLUME", "shire");
+    data.addMetadata("owner", "bilbo");
+
+    containerManager.createContainer(
+        createSingleNodePipeline(containerName),
+        data);
+
+    File orgContainerFile = containerManager.getContainerFile(data);
+    Assert.assertTrue(orgContainerFile.exists());
+
+    ContainerData newData = new ContainerData(containerName, containerID++,
+        conf);
+    newData.addMetadata("VOLUME", "shire_new");
+    newData.addMetadata("owner", "bilbo_new");
+
+    containerManager.updateContainer(
+        createSingleNodePipeline(containerName),
+        containerName,
+        newData, false);
+
+    Assert.assertEquals(1, containerManager.getContainerMap().size());
+    Assert.assertTrue(containerManager.getContainerMap()
+        .containsKey(containerName));
+
+    // Verify in-memory map
+    ContainerData actualNewData = containerManager.getContainerMap()
+        .get(containerName).getContainer();
+    Assert.assertEquals("shire_new",
+        actualNewData.getAllMetadata().get("VOLUME"));
+    Assert.assertEquals("bilbo_new",
+        actualNewData.getAllMetadata().get("owner"));
+
+    // Verify container data on disk
+    File newContainerFile = containerManager.getContainerFile(actualNewData);
+    Assert.assertTrue("Container file should exist.",
+        newContainerFile.exists());
+    Assert.assertEquals("Container file should be in same location.",
+        orgContainerFile.getAbsolutePath(),
+        newContainerFile.getAbsolutePath());
+
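+    // Read the container file back from disk and verify that the persisted
+    // protobuf carries the updated metadata.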
+    try (FileInputStream newIn = new FileInputStream(newContainerFile)) {
+      ContainerProtos.ContainerData actualContainerDataProto =
+          ContainerProtos.ContainerData.parseDelimitedFrom(newIn);
+      ContainerData actualContainerData = ContainerData
+          .getFromProtBuf(actualContainerDataProto, conf);
+      Assert.assertEquals("shire_new",
+          actualContainerData.getAllMetadata().get("VOLUME"));
+      Assert.assertEquals("bilbo_new",
+          actualContainerData.getAllMetadata().get("owner"));
+    }
+
+    // Test force update flag.
+    // Delete container file then try to update without force update flag.
+    FileUtil.fullyDelete(newContainerFile);
+    try {
+      containerManager.updateContainer(createSingleNodePipeline(containerName),
+          containerName, newData, false);
+    } catch (StorageContainerException ex) {
+      Assert.assertEquals("Container file not exists or "
+          + "corrupted. Name: " + containerName, ex.getMessage());
+    }
+
+    // Update with force flag, it should be success.
+    newData = new ContainerData(containerName, containerID++, conf);
+    newData.addMetadata("VOLUME", "shire_new_1");
+    newData.addMetadata("owner", "bilbo_new_1");
+    containerManager.updateContainer(createSingleNodePipeline(containerName),
+        containerName, newData, true);
+
+    // Verify in-memory map
+    actualNewData = containerManager.getContainerMap()
+        .get(containerName).getContainer();
+    Assert.assertEquals("shire_new_1",
+        actualNewData.getAllMetadata().get("VOLUME"));
+    Assert.assertEquals("bilbo_new_1",
+        actualNewData.getAllMetadata().get("owner"));
+
+    // Update a non-existing container
+    exception.expect(StorageContainerException.class);
+    exception.expectMessage("Container doesn't exist.");
+    containerManager.updateContainer(
+        createSingleNodePipeline("non_exist_container"),
+        "non_exist_container", newData, false);
+  }
+
+  private KeyData writeKeyHelper(Pipeline pipeline,
+      String containerName, String keyName)
+      throws IOException, NoSuchAlgorithmException {
+    ChunkInfo info = writeChunkHelper(containerName, keyName, pipeline);
+    KeyData keyData = new KeyData(containerName, keyName);
+    List<ContainerProtos.ChunkInfo> chunkList = new LinkedList<>();
+    chunkList.add(info.getProtoBufMessage());
+    keyData.setChunks(chunkList);
+    return keyData;
+  }
+
+  @Test
+  public void testListKey() throws Exception {
+    String containerName = "c0" + RandomStringUtils.randomAlphanumeric(10);
+    Pipeline pipeline = createSingleNodePipeline(containerName);
+    List<String> expectedKeys = new ArrayList<String>();
+    for (int i = 0; i < 10; i++) {
+      String keyName = "k" + i + "-" + UUID.randomUUID();
+      expectedKeys.add(keyName);
+      KeyData kd = writeKeyHelper(pipeline, containerName, keyName);
+      keyManager.putKey(pipeline, kd);
+    }
+
+    // List all keys
+    List<KeyData> result = keyManager.listKey(pipeline, null, null, 100);
+    Assert.assertEquals(10, result.size());
+
+    int index = 0;
+    for (int i = index; i < result.size(); i++) {
+      KeyData data = result.get(i);
+      Assert.assertEquals(containerName, data.getContainerName());
+      Assert.assertEquals(expectedKeys.get(i), data.getKeyName());
+      index++;
+    }
+
+    // List key with prefix
+    result = keyManager.listKey(pipeline, "k1", null, 100);
+    // There is only one key with prefix k1
+    Assert.assertEquals(1, result.size());
+    Assert.assertEquals(expectedKeys.get(1), result.get(0).getKeyName());
+
+
+    // List key with startKey filter
+    String k6 = expectedKeys.get(6);
+    result = keyManager.listKey(pipeline, null, k6, 100);
+
+    Assert.assertEquals(4, result.size());
+    for (int i = 6; i < 10; i++) {
+      Assert.assertEquals(expectedKeys.get(i),
+          result.get(i - 6).getKeyName());
+    }
+
+    // List key with both prefix and startKey filter
+    String k7 = expectedKeys.get(7);
+    result = keyManager.listKey(pipeline, "k3", k7, 100);
+    // k3 is after k7, hence we get an empty result
+    Assert.assertTrue(result.isEmpty());
+
+    // Set a pretty small cap for the key count
+    result = keyManager.listKey(pipeline, null, null, 3);
+    Assert.assertEquals(3, result.size());
+    for (int i = 0; i < 3; i++) {
+      Assert.assertEquals(expectedKeys.get(i), result.get(i).getKeyName());
+    }
+
+    // Count must be >0
+    exception.expect(IllegalArgumentException.class);
+    exception.expectMessage("Count must be a positive number.");
+    keyManager.listKey(pipeline, null, null, -1);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
new file mode 100644
index 0000000..0034e8e
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.client.ObjectStore;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientFactory;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * Tests the behaviour of the datanode when it receives a close container command.
+ */
+public class TestCloseContainerHandler {
+
+  @Test
+  public void test() throws IOException, TimeoutException, InterruptedException,
+      OzoneException {
+
+    //setup a cluster (1G free space is enough for a unit test)
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OZONE_SCM_CONTAINER_SIZE_GB, "1");
+    MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(1).build();
+    cluster.waitForClusterToBeReady();
+
+    // The easiest way to create an open container is to create a key.
+    OzoneClient client = OzoneClientFactory.getClient(conf);
+    ObjectStore objectStore = client.getObjectStore();
+    objectStore.createVolume("test");
+    objectStore.getVolume("test").createBucket("test");
+    OzoneOutputStream key = objectStore.getVolume("test").getBucket("test")
+        .createKey("test", 1024, ReplicationType.STAND_ALONE,
+            ReplicationFactor.ONE);
+    key.write("test".getBytes());
+    key.close();
+
+    // Get the name of a valid container.
+    KsmKeyArgs keyArgs =
+        new KsmKeyArgs.Builder().setVolumeName("test").setBucketName("test")
+            .setType(HddsProtos.ReplicationType.STAND_ALONE)
+            .setFactor(HddsProtos.ReplicationFactor.ONE).setDataSize(1024)
+            .setKeyName("test").build();
+
+    KsmKeyLocationInfo ksmKeyLocationInfo =
+        cluster.getKeySpaceManager().lookupKey(keyArgs).getKeyLocationVersions()
+            .get(0).getBlocksLatestVersionOnly().get(0);
+
+    String containerName = ksmKeyLocationInfo.getContainerName();
+
+    Assert.assertFalse(isContainerClosed(cluster, containerName));
+
+    DatanodeDetails datanodeDetails = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails();
+    // Send the command to close the container.
+    cluster.getStorageContainerManager().getScmNodeManager()
+        .addDatanodeCommand(datanodeDetails.getUuid(),
+            new CloseContainerCommand(containerName));
+
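+    // The datanode handles the close command asynchronously, so poll until
+    // the container is reported as closed.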
+    GenericTestUtils.waitFor(() -> isContainerClosed(cluster, containerName),
+            500,
+            5 * 1000);
+
+    // Double-check that the container is really closed (waitFor would also
+    // have thrown on timeout).
+    Assert.assertTrue(isContainerClosed(cluster, containerName));
+  }
+
+  private Boolean isContainerClosed(MiniOzoneCluster cluster,
+      String containerName) {
+    ContainerData containerData;
+    try {
+      containerData = cluster.getHddsDatanodes().get(0)
+          .getDatanodeStateMachine().getContainer().getContainerManager()
+          .readContainer(containerName);
+      return !containerData.isOpen();
+    } catch (StorageContainerException e) {
+      throw new AssertionError(e);
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
new file mode 100644
index 0000000..1565fbc
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.metrics;
+
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.assertQuantileGauges;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.mockito.Mockito.mock;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
+import org.apache.hadoop.ozone.container.common.impl.Dispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * Test for metrics published by storage containers.
+ */
+public class TestContainerMetrics {
+
+  @Test
+  public void testContainerMetrics() throws Exception {
+    XceiverServer server = null;
+    XceiverClient client = null;
+    String containerName = OzoneUtils.getRequestID();
+    String keyName = OzoneUtils.getRequestID();
+
+    try {
+      final int interval = 1;
+      Pipeline pipeline = ContainerTestHelper
+          .createSingleNodePipeline(containerName);
+      OzoneConfiguration conf = new OzoneConfiguration();
+      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          pipeline.getLeader().getContainerPort());
+      conf.setInt(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY,
+          interval);
+
+      // Since we are only testing container metrics, we can mock the
+      // ContainerManager and ChunkManager instances instead of starting
+      // the whole cluster.
+      ContainerManager containerManager = mock(ContainerManager.class);
+      ChunkManager chunkManager = mock(ChunkManager.class);
+      Mockito.doNothing().when(chunkManager).writeChunk(
+          Mockito.any(Pipeline.class), Mockito.anyString(),
+          Mockito.any(ChunkInfo.class), Mockito.any(byte[].class),
+          Mockito.any(ContainerProtos.Stage.class));
+
+      Mockito.doReturn(chunkManager).when(containerManager).getChunkManager();
+      Mockito.doReturn(true).when(containerManager).isOpen(containerName);
+
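+      // Use a real Dispatcher and XceiverServer so the container metrics
+      // under test are actually recorded, while the managers stay mocked.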
+      Dispatcher dispatcher = new Dispatcher(containerManager, conf);
+      dispatcher.init();
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+      server = new XceiverServer(datanodeDetails, conf, dispatcher);
+      client = new XceiverClient(pipeline, conf);
+
+      server.start();
+      client.connect();
+
+      // Create container
+      ContainerCommandRequestProto request = ContainerTestHelper
+          .getCreateContainerRequest(containerName, pipeline);
+      ContainerCommandResponseProto response = client.sendCommand(request);
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+
+      // Write Chunk
+      ContainerProtos.ContainerCommandRequestProto writeChunkRequest =
+          ContainerTestHelper.getWriteChunkRequest(
+              pipeline, containerName, keyName, 1024);
+      response = client.sendCommand(writeChunkRequest);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+
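+      // One create-container call and one write-chunk call should now be
+      // reflected in the published metrics.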
+      MetricsRecordBuilder containerMetrics = getMetrics(
+          "StorageContainerMetrics");
+      assertCounter("NumOps", 2L, containerMetrics);
+      assertCounter("numCreateContainer", 1L, containerMetrics);
+      assertCounter("numWriteChunk", 1L, containerMetrics);
+      assertCounter("bytesWriteChunk", 1024L, containerMetrics);
+      assertCounter("LatencyWriteChunkNumOps", 1L, containerMetrics);
+
+      String sec = interval + "s";
+      Thread.sleep((interval + 1) * 1000);
+      assertQuantileGauges("WriteChunkNanos" + sec, containerMetrics);
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
new file mode 100644
index 0000000..4a6ca1d
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -0,0 +1,587 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.ozoneimpl;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClient;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * Tests ozone containers.
+ */
+public class TestOzoneContainer {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  @Test
+  public void testCreateOzoneContainer() throws Exception {
+    String containerName = OzoneUtils.getRequestID();
+    OzoneConfiguration conf = newOzoneConfiguration();
+    OzoneContainer container = null;
+    MiniOzoneCluster cluster = null;
+    try {
+      cluster = MiniOzoneCluster.newBuilder(conf).build();
+      cluster.waitForClusterToBeReady();
+      // We don't start the Ozone Container via the datanode; we start it
+      // independently in this test.
+      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline(
+          containerName);
+      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          pipeline.getLeader().getContainerPort());
+      conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT, false);
+      container = new OzoneContainer(TestUtils.getDatanodeDetails(), conf);
+      container.start();
+
+      XceiverClient client = new XceiverClient(pipeline, conf);
+      client.connect();
+      createContainerForTesting(client, containerName);
+    } finally {
+      if (container != null) {
+        container.stop();
+      }
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  static OzoneConfiguration newOzoneConfiguration() {
+    final OzoneConfiguration conf = new OzoneConfiguration();
+    ContainerTestHelper.setOzoneLocalStorageRoot(
+        TestOzoneContainer.class, conf);
+    return conf;
+  }
+
+  @Test
+  public void testOzoneContainerViaDataNode() throws Exception {
+    MiniOzoneCluster cluster = null;
+    try {
+      String containerName = OzoneUtils.getRequestID();
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      // Start the Ozone container via datanode creation.
+
+      Pipeline pipeline =
+          ContainerTestHelper.createSingleNodePipeline(containerName);
+      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          pipeline.getLeader().getContainerPort());
+
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setRandomContainerPort(false)
+          .build();
+      cluster.waitForClusterToBeReady();
+
+      // This client talks to ozone container via datanode.
+      XceiverClient client = new XceiverClient(pipeline, conf);
+
+      runTestOzoneContainerViaDataNode(containerName, client);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  static void runTestOzoneContainerViaDataNode(
+      String containerName, XceiverClientSpi client) throws Exception {
+    ContainerProtos.ContainerCommandRequestProto
+        request, writeChunkRequest, putKeyRequest,
+        updateRequest1, updateRequest2;
+    ContainerProtos.ContainerCommandResponseProto response,
+        updateResponse1, updateResponse2;
+    try {
+      client.connect();
+
+      // Create container
+      createContainerForTesting(client, containerName);
+      writeChunkRequest = writeChunkForContainer(client, containerName, 1024);
+
+      // Read Chunk
+      request = ContainerTestHelper.getReadChunkRequest(writeChunkRequest
+          .getWriteChunk());
+
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      // Put Key
+      putKeyRequest = ContainerTestHelper.getPutKeyRequest(writeChunkRequest
+              .getWriteChunk());
+
+
+      response = client.sendCommand(putKeyRequest);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert
+          .assertTrue(putKeyRequest.getTraceID().equals(response.getTraceID()));
+
+      // Get Key
+      request = ContainerTestHelper.getKeyRequest(putKeyRequest.getPutKey());
+      response = client.sendCommand(request);
+      ContainerTestHelper.verifyGetKey(request, response);
+
+
+      // Delete Key
+      request =
+          ContainerTestHelper.getDeleteKeyRequest(putKeyRequest.getPutKey());
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      //Delete Chunk
+      request = ContainerTestHelper.getDeleteChunkRequest(writeChunkRequest
+          .getWriteChunk());
+
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      //Update an existing container
+      Map<String, String> containerUpdate = new HashMap<String, String>();
+      containerUpdate.put("container_updated_key", "container_updated_value");
+      updateRequest1 = ContainerTestHelper.getUpdateContainerRequest(
+              containerName, containerUpdate);
+      updateResponse1 = client.sendCommand(updateRequest1);
+      Assert.assertNotNull(updateResponse1);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+
+      //Update an non-existing container
+      updateRequest2 = ContainerTestHelper.getUpdateContainerRequest(
+              "non_exist_container", containerUpdate);
+      updateResponse2 = client.sendCommand(updateRequest2);
+      Assert.assertEquals(ContainerProtos.Result.CONTAINER_NOT_FOUND,
+          updateResponse2.getResult());
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+    }
+  }
+
+  @Test
+  public void testBothGetandPutSmallFile() throws Exception {
+    MiniOzoneCluster cluster = null;
+    XceiverClient client = null;
+    try {
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      client = createClientForTesting(conf);
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setRandomContainerPort(false)
+          .build();
+      cluster.waitForClusterToBeReady();
+      String containerName = client.getPipeline().getContainerName();
+
+      runTestBothGetandPutSmallFile(containerName, client);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  static void runTestBothGetandPutSmallFile(
+      String containerName, XceiverClientSpi client) throws Exception {
+    try {
+      client.connect();
+
+      createContainerForTesting(client, containerName);
+
+      String keyName = OzoneUtils.getRequestID();
+      final ContainerProtos.ContainerCommandRequestProto smallFileRequest
+          = ContainerTestHelper.getWriteSmallFileRequest(
+          client.getPipeline(), containerName, keyName, 1024);
+      ContainerProtos.ContainerCommandResponseProto response
+          = client.sendCommand(smallFileRequest);
+      Assert.assertNotNull(response);
+      Assert.assertTrue(smallFileRequest.getTraceID()
+          .equals(response.getTraceID()));
+
+      final ContainerProtos.ContainerCommandRequestProto getSmallFileRequest
+          = ContainerTestHelper.getReadSmallFileRequest(
+          smallFileRequest.getPutSmallFile().getKey());
+      response = client.sendCommand(getSmallFileRequest);
+      Assert.assertArrayEquals(
+          smallFileRequest.getPutSmallFile().getData().toByteArray(),
+          response.getGetSmallFile().getData().getData().toByteArray());
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+    }
+  }
+
+
+
+  @Test
+  public void testCloseContainer() throws Exception {
+    MiniOzoneCluster cluster = null;
+    XceiverClient client = null;
+    ContainerProtos.ContainerCommandResponseProto response;
+    ContainerProtos.ContainerCommandRequestProto
+        writeChunkRequest, putKeyRequest, request;
+    try {
+
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      client = createClientForTesting(conf);
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setRandomContainerPort(false)
+          .build();
+      cluster.waitForClusterToBeReady();
+      client.connect();
+
+      String containerName = client.getPipeline().getContainerName();
+      createContainerForTesting(client, containerName);
+      writeChunkRequest = writeChunkForContainer(client, containerName, 1024);
+
+
+      putKeyRequest = ContainerTestHelper.getPutKeyRequest(writeChunkRequest
+              .getWriteChunk());
+      // Put key before closing.
+      response = client.sendCommand(putKeyRequest);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+      Assert.assertTrue(
+          putKeyRequest.getTraceID().equals(response.getTraceID()));
+
+      // Close the container.
+      request = ContainerTestHelper.getCloseContainer(client.getPipeline());
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+
+      // Assert that none of the write operations work after close.
+
+      // Write chunks should fail now.
+
+      response = client.sendCommand(writeChunkRequest);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.CLOSED_CONTAINER_IO,
+          response.getResult());
+      Assert.assertTrue(
+          writeChunkRequest.getTraceID().equals(response.getTraceID()));
+
+      // Read chunk must work on a closed container.
+      request = ContainerTestHelper.getReadChunkRequest(writeChunkRequest
+          .getWriteChunk());
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+
+      // Put key will fail on a closed container.
+      response = client.sendCommand(putKeyRequest);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.CLOSED_CONTAINER_IO,
+          response.getResult());
+      Assert
+          .assertTrue(putKeyRequest.getTraceID().equals(response.getTraceID()));
+
+      // Get key must work on the closed container.
+      request = ContainerTestHelper.getKeyRequest(putKeyRequest.getPutKey());
+      response = client.sendCommand(request);
+      ContainerTestHelper.verifyGetKey(request, response);
+
+      // Delete Key must fail on a closed container.
+      request =
+          ContainerTestHelper.getDeleteKeyRequest(putKeyRequest.getPutKey());
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.CLOSED_CONTAINER_IO,
+          response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  @Test
+  public void testDeleteContainer() throws Exception {
+    MiniOzoneCluster cluster = null;
+    XceiverClient client = null;
+    ContainerProtos.ContainerCommandResponseProto response;
+    ContainerProtos.ContainerCommandRequestProto request,
+        writeChunkRequest, putKeyRequest;
+    try {
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      client = createClientForTesting(conf);
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setRandomContainerPort(false)
+          .build();
+      cluster.waitForClusterToBeReady();
+      client.connect();
+
+      String containerName = client.getPipeline().getContainerName();
+      createContainerForTesting(client, containerName);
+      writeChunkRequest = writeChunkForContainer(client, containerName, 1024);
+
+      putKeyRequest = ContainerTestHelper.getPutKeyRequest(writeChunkRequest
+          .getWriteChunk());
+      // Put key before deleting.
+      response = client.sendCommand(putKeyRequest);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+      Assert.assertTrue(
+          putKeyRequest.getTraceID().equals(response.getTraceID()));
+
+      // Container cannot be deleted forcibly because
+      // the container is not closed.
+      request = ContainerTestHelper.getDeleteContainer(
+          client.getPipeline(), true);
+      response = client.sendCommand(request);
+
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.UNCLOSED_CONTAINER_IO,
+          response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      // Close the container.
+      request = ContainerTestHelper.getCloseContainer(client.getPipeline());
+      response = client.sendCommand(request);
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      // Container cannot be deleted because the container is not empty.
+      request = ContainerTestHelper.getDeleteContainer(
+          client.getPipeline(), false);
+      response = client.sendCommand(request);
+
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.ERROR_CONTAINER_NOT_EMPTY,
+          response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+      // The container can now be deleted forcibly because it is closed,
+      // even though it is non-empty.
+      request = ContainerTestHelper.getDeleteContainer(
+          client.getPipeline(), true);
+      response = client.sendCommand(request);
+
+      Assert.assertNotNull(response);
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS,
+          response.getResult());
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+
+  // Runs a set of commands as Async calls and verifies that calls indeed worked
+  // as expected.
+  static void runAsyncTests(
+      String containerName, XceiverClientSpi client) throws Exception {
+    try {
+      client.connect();
+
+      createContainerForTesting(client, containerName);
+      final List<CompletableFuture> computeResults = new LinkedList<>();
+      int requestCount = 1000;
+      // Create a bunch of Async calls from this test.
+      for(int x = 0; x <requestCount; x++) {
+        String keyName = OzoneUtils.getRequestID();
+        final ContainerProtos.ContainerCommandRequestProto smallFileRequest
+            = ContainerTestHelper.getWriteSmallFileRequest(
+            client.getPipeline(), containerName, keyName, 1024);
+
+        CompletableFuture<ContainerProtos.ContainerCommandResponseProto>
+            response = client.sendCommandAsync(smallFileRequest);
+        computeResults.add(response);
+      }
+
+      CompletableFuture<Void> combinedFuture =
+          CompletableFuture.allOf(computeResults.toArray(
+              new CompletableFuture[computeResults.size()]));
+      // Wait for all futures to complete.
+      combinedFuture.get();
+      // Assert that all futures are indeed done.
+      for (CompletableFuture future : computeResults) {
+        Assert.assertTrue(future.isDone());
+      }
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+    }
+  }
+
+  @Test
+  public void testXceiverClientAsync() throws Exception {
+    MiniOzoneCluster cluster = null;
+    XceiverClient client = null;
+    try {
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      client = createClientForTesting(conf);
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setRandomContainerPort(false)
+          .build();
+      cluster.waitForClusterToBeReady();
+      String containerName = client.getPipeline().getContainerName();
+      runAsyncTests(containerName, client);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  @Test
+  public void testInvalidRequest() throws Exception {
+    MiniOzoneCluster cluster = null;
+    XceiverClient client;
+    ContainerProtos.ContainerCommandRequestProto request;
+    try {
+      OzoneConfiguration conf = newOzoneConfiguration();
+
+      client = createClientForTesting(conf);
+      cluster = MiniOzoneCluster.newBuilder(conf)
+              .setRandomContainerPort(false)
+              .build();
+      cluster.waitForClusterToBeReady();
+      client.connect();
+
+      // Send a request without traceId.
+      request = ContainerTestHelper
+          .getRequestWithoutTraceId(client.getPipeline());
+      client.sendCommand(request);
+      Assert.fail("IllegalArgumentException expected");
+    } catch(IllegalArgumentException iae){
+      GenericTestUtils.assertExceptionContains("Invalid trace ID", iae);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+
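+  // Creates a standalone XceiverClient bound to a fresh single-node pipeline
+  // and points the container IPC port at that pipeline's leader datanode.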
+  private static XceiverClient createClientForTesting(OzoneConfiguration conf)
+      throws Exception {
+    String containerName = OzoneUtils.getRequestID();
+    // Start ozone container Via Datanode create.
+    Pipeline pipeline =
+        ContainerTestHelper.createSingleNodePipeline(containerName);
+    conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+        pipeline.getLeader().getContainerPort());
+
+    // This client talks to ozone container via datanode.
+    return new XceiverClient(pipeline, conf);
+  }
+
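+  // Sends a CreateContainer command and verifies that the response echoes the
+  // request's trace ID.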
+  private static void createContainerForTesting(XceiverClientSpi client,
+      String containerName) throws Exception {
+    // Create container
+    ContainerProtos.ContainerCommandRequestProto request =
+        ContainerTestHelper.getCreateContainerRequest(containerName,
+            client.getPipeline());
+    ContainerProtos.ContainerCommandResponseProto response =
+        client.sendCommand(request);
+    Assert.assertNotNull(response);
+    Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+  }
+
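+  // Writes a single chunk of the given length under a random key and expects
+  // SUCCESS from the datanode; returns the request for follow-up commands.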
+  private static ContainerProtos.ContainerCommandRequestProto
+      writeChunkForContainer(XceiverClientSpi client,
+      String containerName, int dataLen) throws Exception {
+    // Write Chunk
+    final String keyName = OzoneUtils.getRequestID();
+    ContainerProtos.ContainerCommandRequestProto writeChunkRequest =
+        ContainerTestHelper.getWriteChunkRequest(client.getPipeline(),
+            containerName, keyName, dataLen);
+
+    ContainerProtos.ContainerCommandResponseProto response =
+        client.sendCommand(writeChunkRequest);
+    Assert.assertNotNull(response);
+    Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+    Assert.assertTrue(
+        writeChunkRequest.getTraceID().equals(response.getTraceID()));
+    return writeChunkRequest;
+  }
+
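+  // Helper for trace-ID tests: creates the container, writes a small file and
+  // checks that the response carries the request's trace ID.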
+  static void runRequestWithoutTraceId(
+          String containerName, XceiverClientSpi client) throws Exception {
+    try {
+      client.connect();
+
+      createContainerForTesting(client, containerName);
+
+      String keyName = OzoneUtils.getRequestID();
+      final ContainerProtos.ContainerCommandRequestProto smallFileRequest
+              = ContainerTestHelper.getWriteSmallFileRequest(
+              client.getPipeline(), containerName, keyName, 1024);
+
+      ContainerProtos.ContainerCommandResponseProto response
+              = client.sendCommand(smallFileRequest);
+      Assert.assertNotNull(response);
+      Assert.assertTrue(smallFileRequest.getTraceID()
+              .equals(response.getTraceID()));
+
+
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+    }
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
new file mode 100644
index 0000000..9c10b28
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.ozoneimpl;
+
+import org.apache.hadoop.ozone.HddsDatanodeService;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.RatisTestHelper;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.rpc.SupportedRpcType;
+import org.apache.ratis.util.CheckedBiConsumer;
+import org.apache.ratis.util.CollectionUtils;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Tests ozone containers with Apache Ratis.
+ */
+@Ignore("Disabling Ratis tests for pipeline work.")
+public class TestOzoneContainerRatis {
+  private static final Logger LOG = LoggerFactory.getLogger(
+      TestOzoneContainerRatis.class);
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  static OzoneConfiguration newOzoneConfiguration() {
+    final OzoneConfiguration conf = new OzoneConfiguration();
+    ContainerTestHelper.setOzoneLocalStorageRoot(
+        TestOzoneContainerRatis.class, conf);
+    return conf;
+  }
+
+  private static void runTestOzoneContainerViaDataNodeRatis(
+      RpcType rpc, int numNodes) throws Exception {
+    runTest("runTestOzoneContainerViaDataNodeRatis", rpc, numNodes,
+        TestOzoneContainer::runTestOzoneContainerViaDataNode);
+  }
+
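+  // Shared driver: starts a MiniOzoneCluster with the given Ratis RPC type and
+  // node count, builds a pipeline over all datanodes, and (once the commented
+  // Ratis wiring is restored) runs the supplied check against it.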
+  private static void runTest(
+      String testName, RpcType rpc, int numNodes,
+      CheckedBiConsumer<String, XceiverClientSpi, Exception> test)
+      throws Exception {
+    LOG.info(testName + "(rpc=" + rpc + ", numNodes=" + numNodes + ")");
+
+    // create Ozone clusters
+    final OzoneConfiguration conf = newOzoneConfiguration();
+    RatisTestHelper.initRatisConf(rpc, conf);
+    final MiniOzoneCluster cluster =
+        MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(numNodes)
+        .build();
+    try {
+      cluster.waitForClusterToBeReady();
+
+      final String containerName = OzoneUtils.getRequestID();
+      final List<HddsDatanodeService> datanodes = cluster.getHddsDatanodes();
+      final Pipeline pipeline = ContainerTestHelper.createPipeline(
+          containerName,
+          CollectionUtils.as(datanodes,
+              HddsDatanodeService::getDatanodeDetails));
+      LOG.info("pipeline=" + pipeline);
+
+      // Create Ratis cluster
+//      final String ratisId = "ratis1";
+//      final PipelineManager manager = RatisManagerImpl.newRatisManager(conf);
+//      manager.createPipeline(ratisId, pipeline.getMachines());
+//      LOG.info("Created RatisCluster " + ratisId);
+//
+//      // check Ratis cluster members
+//      final List<DatanodeDetails> dns = manager.getMembers(ratisId);
+//      Assert.assertEquals(pipeline.getMachines(), dns);
+//
+//      // run test
+//      final XceiverClientSpi client = XceiverClientRatis
+// .newXceiverClientRatis(
+//          pipeline, conf);
+//      test.accept(containerName, client);
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  private static void runTestBothGetandPutSmallFileRatis(
+      RpcType rpc, int numNodes) throws Exception {
+    runTest("runTestBothGetandPutSmallFileRatis", rpc, numNodes,
+        TestOzoneContainer::runTestBothGetandPutSmallFile);
+  }
+
+  @Test
+  public void testOzoneContainerViaDataNodeRatisGrpc() throws Exception {
+    runTestOzoneContainerViaDataNodeRatis(SupportedRpcType.GRPC, 1);
+    runTestOzoneContainerViaDataNodeRatis(SupportedRpcType.GRPC, 3);
+  }
+
+  @Test
+  public void testOzoneContainerViaDataNodeRatisNetty() throws Exception {
+    runTestOzoneContainerViaDataNodeRatis(SupportedRpcType.NETTY, 1);
+    runTestOzoneContainerViaDataNodeRatis(SupportedRpcType.NETTY, 3);
+  }
+
+  @Test
+  public void testBothGetandPutSmallFileRatisNetty() throws Exception {
+    runTestBothGetandPutSmallFileRatis(SupportedRpcType.NETTY, 1);
+    runTestBothGetandPutSmallFileRatis(SupportedRpcType.NETTY, 3);
+  }
+
+  @Test
+  public void testBothGetandPutSmallFileRatisGrpc() throws Exception {
+    runTestBothGetandPutSmallFileRatis(SupportedRpcType.GRPC, 1);
+    runTestBothGetandPutSmallFileRatis(SupportedRpcType.GRPC, 3);
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
new file mode 100644
index 0000000..12c2b7b
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.ozoneimpl;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.HddsDatanodeService;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.RatisTestHelper;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.rpc.SupportedRpcType;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.stream.Collectors;
+
+/**
+ * Tests ozone containers with Apache Ratis.
+ */
+@Ignore("Disabling Ratis tests for pipeline work.")
+public class TestRatisManager {
+  private static final Logger LOG = LoggerFactory.getLogger(
+      TestRatisManager.class);
+
+  static OzoneConfiguration newOzoneConfiguration() {
+    final OzoneConfiguration conf = new OzoneConfiguration();
+    ContainerTestHelper.setOzoneLocalStorageRoot(
+        TestRatisManager.class, conf);
+    return conf;
+  }
+
+
+  /** Set the timeout for every test. */
+  @Rule
+  public Timeout testTimeout = new Timeout(200_000);
+
+  @Test
+  public void testTestRatisManagerGrpc() throws Exception {
+    runTestRatisManager(SupportedRpcType.GRPC);
+  }
+
+  @Test
+  public void testTestRatisManagerNetty() throws Exception {
+    runTestRatisManager(SupportedRpcType.NETTY);
+  }
+
+  private static void runTestRatisManager(RpcType rpc) throws Exception {
+    LOG.info("runTestRatisManager, rpc=" + rpc);
+
+    // create Ozone clusters
+    final OzoneConfiguration conf = newOzoneConfiguration();
+    RatisTestHelper.initRatisConf(rpc, conf);
+    final MiniOzoneCluster cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(5)
+        .build();
+    try {
+      cluster.waitForClusterToBeReady();
+
+      final List<HddsDatanodeService> datanodes = cluster.getHddsDatanodes();
+      final List<DatanodeDetails> datanodeDetailsSet = datanodes.stream()
+          .map(HddsDatanodeService::getDatanodeDetails).collect(
+              Collectors.toList());
+
+      //final RatisManager manager = RatisManager.newRatisManager(conf);
+
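+      // Partition the five datanodes into three groups using the index
+      // boundaries below: [0, 3), [3, 4) and [4, 5).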
+      final int[] idIndex = {3, 4, 5};
+      for (int i = 0; i < idIndex.length; i++) {
+        final int previous = i == 0 ? 0 : idIndex[i - 1];
+        final List<DatanodeDetails> subIds = datanodeDetailsSet.subList(
+            previous, idIndex[i]);
+
+        // Create Ratis cluster
+        final String ratisId = "ratis" + i;
+        //manager.createRatisCluster(ratisId, subIds);
+        LOG.info("Created RatisCluster " + ratisId);
+
+        // check Ratis cluster members
+        //final List<DatanodeDetails> dns = manager.getMembers(ratisId);
+        //Assert.assertEquals(subIds, dns);
+      }
+
+      // randomly close two of the clusters
+      final int chosen = ThreadLocalRandom.current().nextInt(idIndex.length);
+      LOG.info("chosen = " + chosen);
+
+      for (int i = 0; i < idIndex.length; i++) {
+        if (i != chosen) {
+          final String ratisId = "ratis" + i;
+          //manager.closeRatisCluster(ratisId);
+        }
+      }
+
+      // update datanodes
+      final String ratisId = "ratis" + chosen;
+      //manager.updatePipeline(ratisId, allIds);
+
+      // check Ratis cluster members
+      //final List<DatanodeDetails> dns = manager.getMembers(ratisId);
+      //Assert.assertEquals(allIds, dns);
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
new file mode 100644
index 0000000..651b10f
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.container.server;
+
+import io.netty.channel.embedded.EmbeddedChannel;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
+
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.RatisTestHelper;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.ozone.container.common.impl.Dispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerHandler;
+import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi;
+import org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClient;
+import org.apache.hadoop.hdds.scm.XceiverClientRatis;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.ratis.RatisHelper;
+import org.apache.ratis.client.RaftClient;
+import org.apache.ratis.protocol.RaftPeer;
+import org.apache.ratis.rpc.RpcType;
+import org.apache.ratis.util.CheckedBiConsumer;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.BiConsumer;
+
+import static org.apache.ratis.rpc.SupportedRpcType.GRPC;
+import static org.apache.ratis.rpc.SupportedRpcType.NETTY;
+import static org.mockito.Mockito.mock;
+
+/**
+ * Test Containers.
+ */
+@Ignore("Takes too long to run this test. Ignoring for time being.")
+public class TestContainerServer {
+  static final String TEST_DIR
+      = GenericTestUtils.getTestDir("dfs").getAbsolutePath() + File.separator;
+
+  @Test
+  public void testPipeline() throws IOException {
+    EmbeddedChannel channel = null;
+    String containerName = OzoneUtils.getRequestID();
+    try {
+      channel = new EmbeddedChannel(new XceiverServerHandler(
+          new TestContainerDispatcher()));
+      ContainerCommandRequestProto request =
+          ContainerTestHelper.getCreateContainerRequest(containerName,
+              ContainerTestHelper.createSingleNodePipeline(containerName));
+      channel.writeInbound(request);
+      Assert.assertTrue(channel.finish());
+
+      Object responseObject = channel.readOutbound();
+      Assert.assertTrue(responseObject instanceof
+          ContainerCommandResponseProto);
+      ContainerCommandResponseProto response =
+          (ContainerCommandResponseProto) responseObject;
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+    } finally {
+      if (channel != null) {
+        channel.close();
+      }
+    }
+  }
+
+  @Test
+  public void testClientServer() throws Exception {
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
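+    // Standalone (non-Ratis) path: one datanode, a plain XceiverServer with a
+    // test dispatcher, and an XceiverClient pointed at its container port.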
+    runTestClientServer(1,
+        (pipeline, conf) -> conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+            pipeline.getLeader().getContainerPort()),
+        XceiverClient::new,
+        (dn, conf) -> new XceiverServer(datanodeDetails, conf,
+            new TestContainerDispatcher()),
+        (dn, p) -> {});
+  }
+
+  @FunctionalInterface
+  interface CheckedBiFunction<LEFT, RIGHT, OUT, THROWABLE extends Throwable> {
+    OUT apply(LEFT left, RIGHT right) throws THROWABLE;
+  }
+
+  @Test
+  public void testClientServerRatisNetty() throws Exception {
+    runTestClientServerRatis(NETTY, 1);
+    runTestClientServerRatis(NETTY, 3);
+  }
+
+  @Test
+  public void testClientServerRatisGrpc() throws Exception {
+    runTestClientServerRatis(GRPC, 1);
+    runTestClientServerRatis(GRPC, 3);
+  }
+
+  static XceiverServerRatis newXceiverServerRatis(
+      DatanodeDetails dn, OzoneConfiguration conf) throws IOException {
+    conf.setInt(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_PORT,
+        dn.getRatisPort());
+    final String dir = TEST_DIR + dn.getUuid();
+    conf.set(OzoneConfigKeys.DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR, dir);
+
+    final ContainerDispatcher dispatcher = new TestContainerDispatcher();
+    return XceiverServerRatis.newXceiverServerRatis(dn, conf, dispatcher);
+  }
+
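+  // Uses a RaftClient to reinitialize the given datanode's Ratis server so it
+  // joins the Raft group formed from the pipeline's members.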
+  static void initXceiverServerRatis(
+      RpcType rpc, DatanodeDetails dd, Pipeline pipeline) throws IOException {
+    final RaftPeer p = RatisHelper.toRaftPeer(dd);
+    final RaftClient client = RatisHelper.newRaftClient(rpc, p);
+    client.reinitialize(RatisHelper.newRaftGroup(pipeline), p.getId());
+  }
+
+
+  static void runTestClientServerRatis(RpcType rpc, int numNodes)
+      throws Exception {
+    runTestClientServer(numNodes,
+        (pipeline, conf) -> RatisTestHelper.initRatisConf(rpc, conf),
+        XceiverClientRatis::newXceiverClientRatis,
+        TestContainerServer::newXceiverServerRatis,
+        (dn, p) -> initXceiverServerRatis(rpc, dn, p));
+  }
+
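+  // Generic client/server harness: the functional parameters configure the
+  // conf for the pipeline, create the client, create one server per datanode,
+  // and perform any per-server initialization (e.g. joining the Ratis group).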
+  static void runTestClientServer(
+      int numDatanodes,
+      BiConsumer<Pipeline, OzoneConfiguration> initConf,
+      CheckedBiFunction<Pipeline, OzoneConfiguration, XceiverClientSpi,
+          IOException> createClient,
+      CheckedBiFunction<DatanodeDetails, OzoneConfiguration, XceiverServerSpi,
+          IOException> createServer,
+      CheckedBiConsumer<DatanodeDetails, Pipeline, IOException> initServer)
+      throws Exception {
+    final List<XceiverServerSpi> servers = new ArrayList<>();
+    XceiverClientSpi client = null;
+    String containerName = OzoneUtils.getRequestID();
+    try {
+      final Pipeline pipeline = ContainerTestHelper.createPipeline(
+          containerName, numDatanodes);
+      final OzoneConfiguration conf = new OzoneConfiguration();
+      initConf.accept(pipeline, conf);
+
+      for(DatanodeDetails dn : pipeline.getMachines()) {
+        final XceiverServerSpi s = createServer.apply(dn, conf);
+        servers.add(s);
+        s.start();
+        initServer.accept(dn, pipeline);
+      }
+
+      client = createClient.apply(pipeline, conf);
+      client.connect();
+
+      final ContainerCommandRequestProto request =
+          ContainerTestHelper
+              .getCreateContainerRequest(containerName, pipeline);
+      Assert.assertNotNull(request.getTraceID());
+
+      ContainerCommandResponseProto response = client.sendCommand(request);
+      Assert.assertEquals(request.getTraceID(), response.getTraceID());
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+      servers.stream().forEach(XceiverServerSpi::stop);
+    }
+  }
+
+  @Test
+  public void testClientServerWithContainerDispatcher() throws Exception {
+    XceiverServer server = null;
+    XceiverClient client = null;
+    String containerName = OzoneUtils.getRequestID();
+
+    try {
+      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline(
+          containerName);
+      OzoneConfiguration conf = new OzoneConfiguration();
+      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          pipeline.getLeader().getContainerPort());
+
+      Dispatcher dispatcher =
+              new Dispatcher(mock(ContainerManager.class), conf);
+      dispatcher.init();
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+      server = new XceiverServer(datanodeDetails, conf, dispatcher);
+      client = new XceiverClient(pipeline, conf);
+
+      server.start();
+      client.connect();
+
+      ContainerCommandRequestProto request =
+          ContainerTestHelper.getCreateContainerRequest(containerName,
+              pipeline);
+      ContainerCommandResponseProto response = client.sendCommand(request);
+      Assert.assertTrue(request.getTraceID().equals(response.getTraceID()));
+      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
+      Assert.assertTrue(dispatcher.getContainerMetrics()
+          .getContainerOpsMetrics(ContainerProtos.Type.CreateContainer) == 1);
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+
+  private static class TestContainerDispatcher implements ContainerDispatcher {
+    /**
+     * Dispatches commands to container layer.
+     *
+     * @param msg - Command Request
+     * @return Command Response
+     */
+    @Override
+    public ContainerCommandResponseProto
+        dispatch(ContainerCommandRequestProto msg)  {
+      return ContainerTestHelper.getCreateContainerResponse(msg);
+    }
+
+    @Override
+    public void init() {
+    }
+
+    @Override
+    public void shutdown() {
+    }
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestDataValidate.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestDataValidate.java
new file mode 100644
index 0000000..8b8072c
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestDataValidate.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.freon;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Tests Freon, with MiniOzoneCluster and validate data.
+ */
+public class TestDataValidate {
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(5).build();
+    cluster.waitForClusterToBeReady();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void ratisTestLargeKey() throws Exception {
+    List<String> args = new ArrayList<>();
+    args.add("-validateWrites");
+    args.add("-numOfVolumes");
+    args.add("1");
+    args.add("-numOfBuckets");
+    args.add("1");
+    args.add("-numOfKeys");
+    args.add("1");
+    args.add("-ratis");
+    args.add("3");
+    args.add("-keySize");
+    args.add("104857600");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
+    Assert.assertEquals(1, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(1, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(1, freon.getNumberOfKeysAdded());
+    Assert.assertEquals(0, freon.getUnsuccessfulValidationCount());
+    Assert.assertEquals(0, res);
+  }
+
+  @Test
+  public void standaloneTestLargeKey() throws Exception {
+    List<String> args = new ArrayList<>();
+    args.add("-validateWrites");
+    args.add("-numOfVolumes");
+    args.add("1");
+    args.add("-numOfBuckets");
+    args.add("1");
+    args.add("-numOfKeys");
+    args.add("1");
+    args.add("-keySize");
+    args.add("104857600");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
+    Assert.assertEquals(1, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(1, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(1, freon.getNumberOfKeysAdded());
+    Assert.assertEquals(0, freon.getUnsuccessfulValidationCount());
+    Assert.assertEquals(0, res);
+  }
+
+  @Test
+  public void validateWriteTest() throws Exception {
+    PrintStream originalStream = System.out;
+    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(outStream));
+    List<String> args = new ArrayList<>();
+    args.add("-validateWrites");
+    args.add("-numOfVolumes");
+    args.add("2");
+    args.add("-numOfBuckets");
+    args.add("5");
+    args.add("-numOfKeys");
+    args.add("10");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
+    Assert.assertEquals(0, res);
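+    // With 2 volumes, 5 buckets per volume and 10 keys per bucket, Freon
+    // should report 2 volumes, 10 buckets and 100 keys.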
+    Assert.assertEquals(2, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(10, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(100, freon.getNumberOfKeysAdded());
+    Assert.assertTrue(freon.getValidateWrites());
+    Assert.assertNotEquals(0, freon.getTotalKeysValidated());
+    Assert.assertNotEquals(0, freon.getSuccessfulValidationCount());
+    Assert.assertEquals(0, freon.getUnsuccessfulValidationCount());
+    System.setOut(originalStream);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestFreon.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestFreon.java
new file mode 100644
index 0000000..203cf4d
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/TestFreon.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.freon;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Tests Freon, with MiniOzoneCluster.
+ */
+public class TestFreon {
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+
+  /**
+   * Create a MiniOzoneCluster with five datanodes for testing.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(5).build();
+    cluster.waitForClusterToBeReady();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void defaultTest() throws Exception {
+    List<String> args = new ArrayList<>();
+    args.add("-numOfVolumes");
+    args.add("2");
+    args.add("-numOfBuckets");
+    args.add("5");
+    args.add("-numOfKeys");
+    args.add("10");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
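+    // Buckets and keys are created per parent: 2 volumes x 5 buckets = 10
+    // buckets, and 10 keys per bucket = 100 keys in total.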
+    Assert.assertEquals(2, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(10, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(100, freon.getNumberOfKeysAdded());
+    Assert.assertEquals(10240 - 36, freon.getKeyValueLength());
+    Assert.assertEquals(0, res);
+  }
+
+  @Test
+  public void multiThread() throws Exception {
+    List<String> args = new ArrayList<>();
+    args.add("-numOfVolumes");
+    args.add("10");
+    args.add("-numOfBuckets");
+    args.add("1");
+    args.add("-numOfKeys");
+    args.add("10");
+    args.add("-numOfThread");
+    args.add("10");
+    args.add("-keySize");
+    args.add("10240");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
+    Assert.assertEquals(10, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(10, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(100, freon.getNumberOfKeysAdded());
+    Assert.assertEquals(0, res);
+  }
+
+  @Test
+  public void ratisTest3() throws Exception {
+    List<String> args = new ArrayList<>();
+    args.add("-numOfVolumes");
+    args.add("10");
+    args.add("-numOfBuckets");
+    args.add("1");
+    args.add("-numOfKeys");
+    args.add("10");
+    args.add("-ratis");
+    args.add("3");
+    args.add("-numOfThread");
+    args.add("10");
+    args.add("-keySize");
+    args.add("10240");
+    Freon freon = new Freon(conf);
+    int res = ToolRunner.run(conf, freon,
+        args.toArray(new String[0]));
+    Assert.assertEquals(10, freon.getNumberOfVolumesCreated());
+    Assert.assertEquals(10, freon.getNumberOfBucketsCreated());
+    Assert.assertEquals(100, freon.getNumberOfKeysAdded());
+    Assert.assertEquals(0, res);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/package-info.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
new file mode 100644
index 0000000..13d86ab
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/freon/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.freon;
+/**
+ * Classes related to Ozone tools tests.
+ */
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestContainerReportWithKeys.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestContainerReportWithKeys.java
new file mode 100644
index 0000000..ad5783e
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestContainerReportWithKeys.java
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.commons.lang.RandomStringUtils;
+
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.*;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * This class tests container report with DN container state info.
+ */
+public class TestContainerReportWithKeys {
+  private static final Logger LOG = LoggerFactory.getLogger(
+      TestContainerReportWithKeys.class);
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneConfiguration conf;
+  private static StorageContainerManager scm;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    scm = cluster.getStorageContainerManager();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testContainerReportKeyWrite() throws Exception {
+    final String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    final String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    final String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    final int keySize = 100;
+
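+    // Write one key, then read back the key count and bytes used both from the
+    // datanode's local ContainerData and from SCM's ContainerInfo for the same
+    // container.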
+    OzoneClient client = OzoneClientFactory.getClient(conf);
+    ObjectStore objectStore = client.getObjectStore();
+    objectStore.createVolume(volumeName);
+    objectStore.getVolume(volumeName).createBucket(bucketName);
+    OzoneOutputStream key =
+        objectStore.getVolume(volumeName).getBucket(bucketName)
+            .createKey(keyName, keySize, ReplicationType.STAND_ALONE,
+                ReplicationFactor.ONE);
+    String dataString = RandomStringUtils.randomAlphabetic(keySize);
+    key.write(dataString.getBytes());
+    key.close();
+
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .setType(HddsProtos.ReplicationType.STAND_ALONE)
+        .setFactor(HddsProtos.ReplicationFactor.ONE).setDataSize(keySize)
+        .build();
+
+
+    KsmKeyLocationInfo keyInfo =
+        cluster.getKeySpaceManager().lookupKey(keyArgs).getKeyLocationVersions()
+            .get(0).getBlocksLatestVersionOnly().get(0);
+
+    ContainerData cd = getContainerData(keyInfo.getContainerName());
+
+    LOG.info("DN Container Data:  keyCount: {} used: {} ",
+        cd.getKeyCount(), cd.getBytesUsed());
+
+    ContainerInfo cinfo = scm.getContainerInfo(keyInfo.getContainerName());
+
+    LOG.info("SCM Container Info keyCount: {} usedBytes: {}",
+        cinfo.getNumberOfKeys(), cinfo.getUsedBytes());
+  }
+
+
+  private static ContainerData getContainerData(String containerName) {
+    ContainerData containerData;
+    try {
+      ContainerManager containerManager = cluster.getHddsDatanodes().get(0)
+          .getDatanodeStateMachine().getContainer().getContainerManager();
+      containerData = containerManager.readContainer(containerName);
+    } catch (StorageContainerException e) {
+      throw new AssertionError(e);
+    }
+    return containerData;
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMMetrcis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMMetrcis.java
new file mode 100644
index 0000000..60d690f
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMMetrcis.java
@@ -0,0 +1,306 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+
+import java.io.IOException;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.mockito.internal.util.reflection.Whitebox;
+
+/**
+ * Test for KSM metrics.
+ */
+public class TestKSMMetrcis {
+  private MiniOzoneCluster cluster;
+  private KeySpaceManager ksmManager;
+
+  /**
+   * The exception used for testing failure metrics.
+   */
+  private IOException exception = new IOException();
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   *
+   * @throws IOException
+   */
+  @Before
+  public void setup() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    ksmManager = cluster.getKeySpaceManager();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @After
+  public void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testVolumeOps() throws IOException {
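+    // Swap in a spied VolumeManager whose operations are stubbed out, so each
+    // call below only exercises the KSM metrics counters.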
+    VolumeManager volumeManager = (VolumeManager) Whitebox
+        .getInternalState(ksmManager, "volumeManager");
+    VolumeManager mockVm = Mockito.spy(volumeManager);
+
+    Mockito.doNothing().when(mockVm).createVolume(null);
+    Mockito.doNothing().when(mockVm).deleteVolume(null);
+    Mockito.doReturn(null).when(mockVm).getVolumeInfo(null);
+    Mockito.doReturn(true).when(mockVm).checkVolumeAccess(null, null);
+    Mockito.doNothing().when(mockVm).setOwner(null, null);
+    Mockito.doReturn(null).when(mockVm).listVolumes(null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "volumeManager", mockVm);
+    doVolumeOps();
+
+    MetricsRecordBuilder ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumVolumeOps", 6L, ksmMetrics);
+    assertCounter("NumVolumeCreates", 1L, ksmMetrics);
+    assertCounter("NumVolumeUpdates", 1L, ksmMetrics);
+    assertCounter("NumVolumeInfos", 1L, ksmMetrics);
+    assertCounter("NumVolumeCheckAccesses", 1L, ksmMetrics);
+    assertCounter("NumVolumeDeletes", 1L, ksmMetrics);
+    assertCounter("NumVolumeLists", 1L, ksmMetrics);
+
+    // inject exception to test for Failure Metrics
+    Mockito.doThrow(exception).when(mockVm).createVolume(null);
+    Mockito.doThrow(exception).when(mockVm).deleteVolume(null);
+    Mockito.doThrow(exception).when(mockVm).getVolumeInfo(null);
+    Mockito.doThrow(exception).when(mockVm).checkVolumeAccess(null, null);
+    Mockito.doThrow(exception).when(mockVm).setOwner(null, null);
+    Mockito.doThrow(exception).when(mockVm).listVolumes(null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "volumeManager", mockVm);
+    doVolumeOps();
+
+    ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumVolumeOps", 12L, ksmMetrics);
+    assertCounter("NumVolumeCreates", 2L, ksmMetrics);
+    assertCounter("NumVolumeUpdates", 2L, ksmMetrics);
+    assertCounter("NumVolumeInfos", 2L, ksmMetrics);
+    assertCounter("NumVolumeCheckAccesses", 2L, ksmMetrics);
+    assertCounter("NumVolumeDeletes", 2L, ksmMetrics);
+    assertCounter("NumVolumeLists", 2L, ksmMetrics);
+
+    assertCounter("NumVolumeCreateFails", 1L, ksmMetrics);
+    assertCounter("NumVolumeUpdateFails", 1L, ksmMetrics);
+    assertCounter("NumVolumeInfoFails", 1L, ksmMetrics);
+    assertCounter("NumVolumeCheckAccessFails", 1L, ksmMetrics);
+    assertCounter("NumVolumeDeleteFails", 1L, ksmMetrics);
+    assertCounter("NumVolumeListFails", 1L, ksmMetrics);
+  }
+
+  @Test
+  public void testBucketOps() throws IOException {
+    BucketManager bucketManager = (BucketManager) Whitebox
+        .getInternalState(ksmManager, "bucketManager");
+    BucketManager mockBm = Mockito.spy(bucketManager);
+
+    Mockito.doNothing().when(mockBm).createBucket(null);
+    Mockito.doNothing().when(mockBm).deleteBucket(null, null);
+    Mockito.doReturn(null).when(mockBm).getBucketInfo(null, null);
+    Mockito.doNothing().when(mockBm).setBucketProperty(null);
+    Mockito.doReturn(null).when(mockBm).listBuckets(null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "bucketManager", mockBm);
+    doBucketOps();
+
+    MetricsRecordBuilder ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumBucketOps", 5L, ksmMetrics);
+    assertCounter("NumBucketCreates", 1L, ksmMetrics);
+    assertCounter("NumBucketUpdates", 1L, ksmMetrics);
+    assertCounter("NumBucketInfos", 1L, ksmMetrics);
+    assertCounter("NumBucketDeletes", 1L, ksmMetrics);
+    assertCounter("NumBucketLists", 1L, ksmMetrics);
+
+    // inject exception to test for Failure Metrics
+    Mockito.doThrow(exception).when(mockBm).createBucket(null);
+    Mockito.doThrow(exception).when(mockBm).deleteBucket(null, null);
+    Mockito.doThrow(exception).when(mockBm).getBucketInfo(null, null);
+    Mockito.doThrow(exception).when(mockBm).setBucketProperty(null);
+    Mockito.doThrow(exception).when(mockBm).listBuckets(null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "bucketManager", mockBm);
+    doBucketOps();
+
+    ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumBucketOps", 10L, ksmMetrics);
+    assertCounter("NumBucketCreates", 2L, ksmMetrics);
+    assertCounter("NumBucketUpdates", 2L, ksmMetrics);
+    assertCounter("NumBucketInfos", 2L, ksmMetrics);
+    assertCounter("NumBucketDeletes", 2L, ksmMetrics);
+    assertCounter("NumBucketLists", 2L, ksmMetrics);
+
+    assertCounter("NumBucketCreateFails", 1L, ksmMetrics);
+    assertCounter("NumBucketUpdateFails", 1L, ksmMetrics);
+    assertCounter("NumBucketInfoFails", 1L, ksmMetrics);
+    assertCounter("NumBucketDeleteFails", 1L, ksmMetrics);
+    assertCounter("NumBucketListFails", 1L, ksmMetrics);
+  }
+
+  @Test
+  public void testKeyOps() throws IOException {
+    KeyManager bucketManager = (KeyManager) Whitebox
+        .getInternalState(ksmManager, "keyManager");
+    KeyManager mockKm = Mockito.spy(bucketManager);
+
+    Mockito.doReturn(null).when(mockKm).openKey(null);
+    Mockito.doNothing().when(mockKm).deleteKey(null);
+    Mockito.doReturn(null).when(mockKm).lookupKey(null);
+    Mockito.doReturn(null).when(mockKm).listKeys(null, null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "keyManager", mockKm);
+    doKeyOps();
+
+    MetricsRecordBuilder ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumKeyOps", 4L, ksmMetrics);
+    assertCounter("NumKeyAllocate", 1L, ksmMetrics);
+    assertCounter("NumKeyLookup", 1L, ksmMetrics);
+    assertCounter("NumKeyDeletes", 1L, ksmMetrics);
+    assertCounter("NumKeyLists", 1L, ksmMetrics);
+
+    // inject exception to test for Failure Metrics
+    Mockito.doThrow(exception).when(mockKm).openKey(null);
+    Mockito.doThrow(exception).when(mockKm).deleteKey(null);
+    Mockito.doThrow(exception).when(mockKm).lookupKey(null);
+    Mockito.doThrow(exception).when(mockKm).listKeys(
+        null, null, null, null, 0);
+
+    Whitebox.setInternalState(ksmManager, "keyManager", mockKm);
+    doKeyOps();
+
+    ksmMetrics = getMetrics("KSMMetrics");
+    assertCounter("NumKeyOps", 8L, ksmMetrics);
+    assertCounter("NumKeyAllocate", 2L, ksmMetrics);
+    assertCounter("NumKeyLookup", 2L, ksmMetrics);
+    assertCounter("NumKeyDeletes", 2L, ksmMetrics);
+    assertCounter("NumKeyLists", 2L, ksmMetrics);
+
+    assertCounter("NumKeyAllocateFails", 1L, ksmMetrics);
+    assertCounter("NumKeyLookupFails", 1L, ksmMetrics);
+    assertCounter("NumKeyDeleteFails", 1L, ksmMetrics);
+    assertCounter("NumKeyListFails", 1L, ksmMetrics);
+  }
+
+  /**
+   * Test volume operations with ignoring thrown exception.
+   */
+  private void doVolumeOps() {
+    try {
+      ksmManager.createVolume(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.deleteVolume(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.getVolumeInfo(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.checkVolumeAccess(null, null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.setOwner(null, null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.listAllVolumes(null, null, 0);
+    } catch (IOException ignored) {
+    }
+  }
+
+  /**
+   * Test bucket operations with ignoring thrown exception.
+   */
+  private void doBucketOps() {
+    try {
+      ksmManager.createBucket(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.deleteBucket(null, null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.getBucketInfo(null, null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.setBucketProperty(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.listBuckets(null, null, null, 0);
+    } catch (IOException ignored) {
+    }
+  }
+
+  /**
+   * Test key operations with ignoring thrown exception.
+   */
+  private void doKeyOps() {
+    try {
+      ksmManager.openKey(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.deleteKey(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.lookupKey(null);
+    } catch (IOException ignored) {
+    }
+
+    try {
+      ksmManager.listKeys(null, null, null, null, 0);
+    } catch (IOException ignored) {
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMSQLCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMSQLCli.java
new file mode 100644
index 0000000..7b92ec7
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKSMSQLCli.java
@@ -0,0 +1,284 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.scm.cli.SQLCLI;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.UUID;
+
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_DB_NAME;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * This class tests the CLI that transforms ksm.db into SQLite DB files.
+ */
+@RunWith(Parameterized.class)
+public class TestKSMSQLCli {
+  private MiniOzoneCluster cluster = null;
+  private StorageHandler storageHandler;
+  private UserArgs userArgs;
+  private OzoneConfiguration conf;
+  private SQLCLI cli;
+
+  private String userName = "userTest";
+  private String adminName = "adminTest";
+  private String volumeName0 = "volumeTest0";
+  private String volumeName1 = "volumeTest1";
+  private String bucketName0 = "bucketTest0";
+  private String bucketName1 = "bucketTest1";
+  private String bucketName2 = "bucketTest2";
+  private String keyName0 = "key0";
+  private String keyName1 = "key1";
+  private String keyName2 = "key2";
+  private String keyName3 = "key3";
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(new Object[][] {
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
+    });
+  }
+
+  private String metaStoreType;
+
+  public TestKSMSQLCli(String type) {
+    metaStoreType = type;
+  }
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @Before
+  public void setup() throws Exception {
+    conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+    userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+
+    VolumeArgs createVolumeArgs0 = new VolumeArgs(volumeName0, userArgs);
+    createVolumeArgs0.setUserName(userName);
+    createVolumeArgs0.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs0);
+    VolumeArgs createVolumeArgs1 = new VolumeArgs(volumeName1, userArgs);
+    createVolumeArgs1.setUserName(userName);
+    createVolumeArgs1.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs1);
+
+    BucketArgs bucketArgs0 = new BucketArgs(volumeName0, bucketName0, userArgs);
+    storageHandler.createBucket(bucketArgs0);
+    BucketArgs bucketArgs1 = new BucketArgs(volumeName1, bucketName1, userArgs);
+    storageHandler.createBucket(bucketArgs1);
+    BucketArgs bucketArgs2 = new BucketArgs(volumeName0, bucketName2, userArgs);
+    storageHandler.createBucket(bucketArgs2);
+
+    KeyArgs keyArgs0 =
+        new KeyArgs(volumeName0, bucketName0, keyName0, userArgs);
+    keyArgs0.setSize(100);
+    KeyArgs keyArgs1 =
+        new KeyArgs(volumeName1, bucketName1, keyName1, userArgs);
+    keyArgs1.setSize(200);
+    KeyArgs keyArgs2 =
+        new KeyArgs(volumeName0, bucketName2, keyName2, userArgs);
+    keyArgs2.setSize(300);
+    KeyArgs keyArgs3 =
+        new KeyArgs(volumeName0, bucketName2, keyName3, userArgs);
+    keyArgs3.setSize(400);
+
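+    // Open and immediately close the key writers; no data is written, so
+    // each key is committed with a data size of 0.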
+    OutputStream stream = storageHandler.newKeyWriter(keyArgs0);
+    stream.close();
+    stream = storageHandler.newKeyWriter(keyArgs1);
+    stream.close();
+    stream = storageHandler.newKeyWriter(keyArgs2);
+    stream.close();
+    stream = storageHandler.newKeyWriter(keyArgs3);
+    stream.close();
+
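+    // Stop KSM and SCM so that the ksm.db metadata store is no longer held
+    // open when the SQL CLI reads it.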
+    cluster.getKeySpaceManager().stop();
+    cluster.getStorageContainerManager().stop();
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, metaStoreType);
+    cli = new SQLCLI(conf);
+  }
+
+  @After
+  public void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testKSMDB() throws Exception {
+    String dbOutPath = GenericTestUtils.getTempPath(
+        UUID.randomUUID() + "/out_sql.db");
+
+    String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
+    String dbPath = dbRootPath + "/" + KSM_DB_NAME;
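+    // -p is the ksm.db input path, -o is the SQLite output file.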
+    String[] args = {"-p", dbPath, "-o", dbOutPath};
+
+    cli.run(args);
+
+    Connection conn = connectDB(dbOutPath);
+    String sql = "SELECT * FROM volumeList";
+    ResultSet rs = executeQuery(conn, sql);
+    List<String> expectedValues =
+        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+    while (rs.next()) {
+      String userNameRs = rs.getString("userName");
+      String volumeNameRs = rs.getString("volumeName");
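+      // The exported userName column carries a one-character prefix, hence
+      // the substring(1) before comparing.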
+      assertEquals(userName, userNameRs.substring(1));
+      assertTrue(expectedValues.remove(volumeNameRs));
+    }
+    assertEquals(0, expectedValues.size());
+
+    sql = "SELECT * FROM volumeInfo";
+    rs = executeQuery(conn, sql);
+    expectedValues =
+        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+    while (rs.next()) {
+      String adName = rs.getString("adminName");
+      String ownerName = rs.getString("ownerName");
+      String volumeName = rs.getString("volumeName");
+      assertEquals(adminName, adName);
+      assertEquals(userName, ownerName);
+      assertTrue(expectedValues.remove(volumeName));
+    }
+    assertEquals(0, expectedValues.size());
+
+    sql = "SELECT * FROM aclInfo";
+    rs = executeQuery(conn, sql);
+    expectedValues =
+        new LinkedList<>(Arrays.asList(volumeName0, volumeName1));
+    while (rs.next()) {
+      String adName = rs.getString("adminName");
+      String ownerName = rs.getString("ownerName");
+      String volumeName = rs.getString("volumeName");
+      String type = rs.getString("type");
+      String uName = rs.getString("userName");
+      String rights = rs.getString("rights");
+      assertEquals(adminName, adName);
+      assertEquals(userName, ownerName);
+      assertEquals("USER", type);
+      assertEquals(userName, uName);
+      assertEquals("READ_WRITE", rights);
+      assertTrue(expectedValues.remove(volumeName));
+    }
+    assertEquals(0, expectedValues.size());
+
+    sql = "SELECT * FROM bucketInfo";
+    rs = executeQuery(conn, sql);
+    HashMap<String, String> expectedMap = new HashMap<>();
+    expectedMap.put(bucketName0, volumeName0);
+    expectedMap.put(bucketName2, volumeName0);
+    expectedMap.put(bucketName1, volumeName1);
+    while (rs.next()) {
+      String volumeName = rs.getString("volumeName");
+      String bucketName = rs.getString("bucketName");
+      boolean versionEnabled = rs.getBoolean("versionEnabled");
+      String storageType = rs.getString("storageType");
+      assertEquals(volumeName, expectedMap.remove(bucketName));
+      assertFalse(versionEnabled);
+      assertEquals("DISK", storegeType);
+    }
+    assertEquals(0, expectedMap.size());
+
+    sql = "SELECT * FROM keyInfo";
+    rs = executeQuery(conn, sql);
+    HashMap<String, List<String>> expectedMap2 = new HashMap<>();
+    // No data was written, so the data size will be 0.
+    expectedMap2.put(keyName0,
+        Arrays.asList(volumeName0, bucketName0, "0"));
+    expectedMap2.put(keyName1,
+        Arrays.asList(volumeName1, bucketName1, "0"));
+    expectedMap2.put(keyName2,
+        Arrays.asList(volumeName0, bucketName2, "0"));
+    expectedMap2.put(keyName3,
+        Arrays.asList(volumeName0, bucketName2, "0"));
+    while (rs.next()) {
+      String volumeName = rs.getString("volumeName");
+      String bucketName = rs.getString("bucketName");
+      String keyName = rs.getString("keyName");
+      int dataSize = rs.getInt("dataSize");
+      List<String> vals = expectedMap2.remove(keyName);
+      assertNotNull(vals);
+      assertEquals(vals.get(0), volumeName);
+      assertEquals(vals.get(1), bucketName);
+      assertEquals(vals.get(2), Integer.toString(dataSize));
+    }
+    assertEquals(0, expectedMap2.size());
+
+    conn.close();
+    Files.delete(Paths.get(dbOutPath));
+  }
+
+  private ResultSet executeQuery(Connection conn, String sql)
+      throws SQLException {
+    Statement stmt = conn.createStatement();
+    return stmt.executeQuery(sql);
+  }
+
+  private Connection connectDB(String dbPath) throws Exception {
+    Class.forName("org.sqlite.JDBC");
+    String connectPath =
+        String.format("jdbc:sqlite:%s", dbPath);
+    return DriverManager.getConnection(connectPath);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManager.java
new file mode 100644
index 0000000..a37f005
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManager.java
@@ -0,0 +1,1238 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.hdds.scm.SCMStorage;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServicePort;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.Status;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.utils.MetadataStore;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.net.InetSocketAddress;
+import java.text.ParseException;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS;
+import static org.apache.hadoop.ozone.OzoneConsts.DELETING_KEY_PREFIX;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KSM_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_CLIENT_ADDRESS_KEY;
+
+/**
+ * Test Key Space Manager operation in distributed handler scenario.
+ */
+public class TestKeySpaceManager {
+  private static MiniOzoneCluster cluster = null;
+  private static StorageHandler storageHandler;
+  private static UserArgs userArgs;
+  private static KSMMetrics ksmMetrics;
+  private static OzoneConfiguration conf;
+  private static String clusterId;
+  private static String scmId;
+  private static String ksmId;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    clusterId = UUID.randomUUID().toString();
+    scmId = UUID.randomUUID().toString();
+    ksmId = UUID.randomUUID().toString();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    conf.setInt(OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS, 2);
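+    // Use a short (2 second) open-key expiry so testExpiredOpenKey can
+    // observe key expiration quickly.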
+    cluster =  MiniOzoneCluster.newBuilder(conf)
+        .setClusterId(clusterId)
+        .setScmId(scmId)
+        .setKsmId(ksmId)
+        .build();
+    cluster.waitForClusterToBeReady();
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+    userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    ksmMetrics = cluster.getKeySpaceManager().getMetrics();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  // Create a volume and verify its attributes after creation.
+  @Test(timeout = 60000)
+  public void testCreateVolume() throws IOException, OzoneException {
+    long volumeCreateFailCount = ksmMetrics.getNumVolumeCreateFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    VolumeArgs getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    VolumeInfo retVolumeinfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertTrue(retVolumeinfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(retVolumeinfo.getOwner().getName().equals(userName));
+    Assert.assertEquals(volumeCreateFailCount,
+        ksmMetrics.getNumVolumeCreateFails());
+  }
+
+  // Create a volume, change its owner, and then verify its attributes.
+  @Test(timeout = 60000)
+  public void testChangeVolumeOwner() throws IOException, OzoneException {
+    long volumeCreateFailCount = ksmMetrics.getNumVolumeCreateFails();
+    long volumeInfoFailCount = ksmMetrics.getNumVolumeInfoFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    String newUserName = "user" + RandomStringUtils.randomNumeric(5);
+    createVolumeArgs.setUserName(newUserName);
+    storageHandler.setVolumeOwner(createVolumeArgs);
+
+    VolumeArgs getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    VolumeInfo retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+
+    Assert.assertTrue(retVolumeInfo.getVolumeName().equals(volumeName));
+    Assert.assertFalse(retVolumeInfo.getOwner().getName().equals(userName));
+    Assert.assertTrue(retVolumeInfo.getOwner().getName().equals(newUserName));
+    Assert.assertEquals(volumeCreateFailCount,
+        ksmMetrics.getNumVolumeCreateFails());
+    Assert.assertEquals(volumeInfoFailCount,
+        ksmMetrics.getNumVolumeInfoFails());
+  }
+
+  // Create a volume, change its quota, and then verify its attributes.
+  @Test(timeout = 60000)
+  public void testChangeVolumeQuota() throws IOException, OzoneException {
+    long numVolumeCreateFail = ksmMetrics.getNumVolumeCreateFails();
+    long numVolumeInfoFail = ksmMetrics.getNumVolumeInfoFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    Random rand = new Random();
+
+    // Create a new volume with a quota
+    OzoneQuota createQuota =
+        new OzoneQuota(rand.nextInt(100), OzoneQuota.Units.GB);
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    createVolumeArgs.setQuota(createQuota);
+    storageHandler.createVolume(createVolumeArgs);
+
+    VolumeArgs getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    VolumeInfo retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertEquals(createQuota.sizeInBytes(),
+        retVolumeInfo.getQuota().sizeInBytes());
+
+    // Set a new quota and test it
+    OzoneQuota setQuota =
+        new OzoneQuota(rand.nextInt(100), OzoneQuota.Units.GB);
+    createVolumeArgs.setQuota(setQuota);
+    storageHandler.setVolumeQuota(createVolumeArgs, false);
+    getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertEquals(setQuota.sizeInBytes(),
+        retVolumeInfo.getQuota().sizeInBytes());
+
+    // Remove the quota and test it again
+    storageHandler.setVolumeQuota(createVolumeArgs, true);
+    getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertEquals(OzoneConsts.MAX_QUOTA_IN_BYTES,
+        retVolumeInfo.getQuota().sizeInBytes());
+    Assert.assertEquals(numVolumeCreateFail,
+        ksmMetrics.getNumVolumeCreateFails());
+    Assert.assertEquals(numVolumeInfoFail,
+        ksmMetrics.getNumVolumeInfoFails());
+  }
+
+  // Create a volume, delete it, and then verify the deletion.
+  @Test(timeout = 60000)
+  public void testDeleteVolume() throws IOException, OzoneException {
+    long volumeCreateFailCount = ksmMetrics.getNumVolumeCreateFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String volumeName1 = volumeName + "_A";
+    String volumeName2 = volumeName + "_AA";
+    VolumeArgs volumeArgs = null;
+    VolumeInfo volumeInfo = null;
+
+    // Create 2 empty volumes with same prefix.
+    volumeArgs = new VolumeArgs(volumeName1, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+
+    volumeArgs = new VolumeArgs(volumeName2, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+
+    volumeArgs  = new VolumeArgs(volumeName1, userArgs);
+    volumeInfo = storageHandler.getVolumeInfo(volumeArgs);
+    Assert.assertTrue(volumeInfo.getVolumeName().equals(volumeName1));
+    Assert.assertTrue(volumeInfo.getOwner().getName().equals(userName));
+    Assert.assertEquals(volumeCreateFailCount,
+        ksmMetrics.getNumVolumeCreateFails());
+
+    // The volume with the _A suffix should be deletable, as it is empty.
+    storageHandler.deleteVolume(volumeArgs);
+
+    // Make sure volume with _AA suffix still exists.
+    volumeArgs = new VolumeArgs(volumeName2, userArgs);
+    volumeInfo = storageHandler.getVolumeInfo(volumeArgs);
+    Assert.assertTrue(volumeInfo.getVolumeName().equals(volumeName2));
+
+    // Make sure volume with _A suffix is successfully deleted.
+    exception.expect(IOException.class);
+    exception.expectMessage("Info Volume failed, error:VOLUME_NOT_FOUND");
+    volumeArgs = new VolumeArgs(volumeName1, userArgs);
+    storageHandler.getVolumeInfo(volumeArgs);
+  }
+
+  // Create a volume with a bucket inside it, then attempt to delete the
+  // volume and verify that the deletion fails.
+  @Test(timeout = 60000)
+  public void testFailedDeleteVolume() throws IOException, OzoneException {
+    long numVolumeCreateFails = ksmMetrics.getNumVolumeCreateFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    VolumeArgs getVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    VolumeInfo retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertTrue(retVolumeInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(retVolumeInfo.getOwner().getName().equals(userName));
+    Assert.assertEquals(numVolumeCreateFails,
+        ksmMetrics.getNumVolumeCreateFails());
+
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+
+    try {
+      storageHandler.deleteVolume(createVolumeArgs);
+      Assert.fail("Expecting deletion should fail "
+          + "because volume is not empty");
+    } catch (IOException ex) {
+      Assert.assertEquals(ex.getMessage(),
+          "Delete Volume failed, error:VOLUME_NOT_EMPTY");
+    }
+    retVolumeInfo = storageHandler.getVolumeInfo(getVolumeArgs);
+    Assert.assertTrue(retVolumeInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(retVolumeInfo.getOwner().getName().equals(userName));
+  }
+
+  // Create a volume and test Volume access for a different user
+  @Test(timeout = 60000)
+  public void testAccessVolume() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String[] groupName =
+        {"group" + RandomStringUtils.randomNumeric(5)};
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    createVolumeArgs.setGroups(groupName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    OzoneAcl userAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER, userName,
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    Assert.assertTrue(storageHandler.checkVolumeAccess(volumeName, userAcl));
+    OzoneAcl group = new OzoneAcl(OzoneAcl.OzoneACLType.GROUP, groupName[0],
+        OzoneAcl.OzoneACLRights.READ);
+    Assert.assertTrue(storageHandler.checkVolumeAccess(volumeName, group));
+
+    // An access check for a different user should fail.
+    String falseUserName = "user" + RandomStringUtils.randomNumeric(5);
+    OzoneAcl falseUserAcl =
+        new OzoneAcl(OzoneAcl.OzoneACLType.USER, falseUserName,
+            OzoneAcl.OzoneACLRights.READ_WRITE);
+    Assert.assertFalse(storageHandler
+        .checkVolumeAccess(volumeName, falseUserAcl));
+    // Checking access with the user name but GROUP ACL type should fail.
+    OzoneAcl falseGroupAcl = new OzoneAcl(OzoneAcl.OzoneACLType.GROUP, userName,
+        OzoneAcl.OzoneACLRights.READ_WRITE);
+    Assert.assertFalse(storageHandler
+        .checkVolumeAccess(volumeName, falseGroupAcl));
+
+    // Access for ACL type WORLD should also fail.
+    OzoneAcl worldAcl =
+        new OzoneAcl(OzoneAcl.OzoneACLType.WORLD, "",
+            OzoneAcl.OzoneACLRights.READ);
+    Assert.assertFalse(storageHandler.checkVolumeAccess(volumeName, worldAcl));
+
+    Assert.assertEquals(0, ksmMetrics.getNumVolumeCheckAccessFails());
+    Assert.assertEquals(0, ksmMetrics.getNumVolumeCreateFails());
+  }
+
+  @Test(timeout = 60000)
+  public void testCreateBucket() throws IOException, OzoneException {
+    long numVolumeCreateFail = ksmMetrics.getNumVolumeCreateFails();
+    long numBucketCreateFail = ksmMetrics.getNumBucketCreateFails();
+    long numBucketInfoFail = ksmMetrics.getNumBucketInfoFails();
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+
+    BucketArgs getBucketArgs = new BucketArgs(volumeName, bucketName,
+        userArgs);
+    BucketInfo bucketInfo = storageHandler.getBucketInfo(getBucketArgs);
+    Assert.assertTrue(bucketInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(bucketInfo.getBucketName().equals(bucketName));
+    Assert.assertEquals(numVolumeCreateFail,
+        ksmMetrics.getNumVolumeCreateFails());
+    Assert.assertEquals(numBucketCreateFail,
+        ksmMetrics.getNumBucketCreateFails());
+    Assert.assertEquals(numBucketInfoFail,
+        ksmMetrics.getNumBucketInfoFails());
+  }
+
+  @Test(timeout = 60000)
+  public void testDeleteBucket() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+    BucketArgs getBucketArgs = new BucketArgs(volumeName, bucketName,
+        userArgs);
+    BucketInfo bucketInfo = storageHandler.getBucketInfo(getBucketArgs);
+    Assert.assertTrue(bucketInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(bucketInfo.getBucketName().equals(bucketName));
+    storageHandler.deleteBucket(bucketArgs);
+    exception.expect(IOException.class);
+    exception.expectMessage("Info Bucket failed, error: BUCKET_NOT_FOUND");
+    storageHandler.getBucketInfo(getBucketArgs);
+  }
+
+  @Test(timeout = 60000)
+  public void testDeleteNonExistingBucket() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+    BucketArgs getBucketArgs = new BucketArgs(volumeName, bucketName,
+        userArgs);
+    BucketInfo bucketInfo = storageHandler.getBucketInfo(getBucketArgs);
+    Assert.assertTrue(bucketInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(bucketInfo.getBucketName().equals(bucketName));
+    BucketArgs newBucketArgs = new BucketArgs(
+        volumeName, bucketName + "_invalid", userArgs);
+    exception.expect(IOException.class);
+    exception.expectMessage("Delete Bucket failed, error:BUCKET_NOT_FOUND");
+    storageHandler.deleteBucket(newBucketArgs);
+  }
+
+
+  @Test(timeout = 60000)
+  public void testDeleteNonEmptyBucket() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+    BucketArgs getBucketArgs = new BucketArgs(volumeName, bucketName,
+        userArgs);
+    BucketInfo bucketInfo = storageHandler.getBucketInfo(getBucketArgs);
+    Assert.assertTrue(bucketInfo.getVolumeName().equals(volumeName));
+    Assert.assertTrue(bucketInfo.getBucketName().equals(bucketName));
+    String dataString = RandomStringUtils.randomAscii(100);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(100);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    exception.expect(IOException.class);
+    exception.expectMessage("Delete Bucket failed, error:BUCKET_NOT_EMPTY");
+    storageHandler.deleteBucket(bucketArgs);
+  }
+
+  /**
+   * Basic test of both putKey and getKey from KSM, as one cannot be tested
+   * without the other.
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testGetKeyWriterReader() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    long numKeyAllocates = ksmMetrics.getNumKeyAllocates();
+    long numKeyLookups = ksmMetrics.getNumKeyLookups();
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String dataString = RandomStringUtils.randomAscii(100);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(100);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    Assert.assertEquals(1 + numKeyAllocates, ksmMetrics.getNumKeyAllocates());
+
+    byte[] data = new byte[dataString.length()];
+    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
+      in.read(data);
+    }
+    Assert.assertEquals(dataString, DFSUtil.bytes2String(data));
+    Assert.assertEquals(1 + numKeyLookups, ksmMetrics.getNumKeyLookups());
+  }
+
+  /**
+   * Test writing the same key twice. Key overwrite is currently allowed;
+   * see the in-test comment about HDFS-11922 for the block-leak caveat.
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testKeyOverwrite() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    long numKeyAllocateFails = ksmMetrics.getNumKeyAllocateFails();
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(100);
+    String dataString = RandomStringUtils.randomAscii(100);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+
+    // We allow the key overwrite to succeed. Note: until HDFS-11922 is
+    // fixed, this leaks the data blocks on the datanode side, because the
+    // overwrite only replaces the key on KSM; the orphaned blocks still
+    // need to be garbage collected from the datanodes.
+    KeyArgs keyArgs2 = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    storageHandler.newKeyWriter(keyArgs2);
+    Assert
+        .assertEquals(numKeyAllocateFails, ksmMetrics.getNumKeyAllocateFails());
+  }
+
+  /**
+   * Test getting a non-existing key.
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testGetNonExistKey() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    long numKeyLookupFails = ksmMetrics.getNumKeyLookupFails();
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    // Try to get the key; this should fail as it hasn't been created.
+    exception.expect(IOException.class);
+    exception.expectMessage("KEY_NOT_FOUND");
+    storageHandler.newKeyReader(keyArgs);
+    Assert.assertEquals(1 + numKeyLookupFails,
+        ksmMetrics.getNumKeyLookupFails());
+  }
+
+  /**
+   * Test deleting keys from KSM.
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testDeleteKey() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    long numKeyDeletes = ksmMetrics.getNumKeyDeletes();
+    long numKeyDeleteFails = ksmMetrics.getNumKeyDeletesFails();
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    storageHandler.createBucket(bucketArgs);
+
+    KeyArgs keyArgs = new KeyArgs(keyName, bucketArgs);
+    keyArgs.setSize(100);
+    String dataString = RandomStringUtils.randomAscii(100);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+
+    storageHandler.deleteKey(keyArgs);
+    Assert.assertEquals(1 + numKeyDeletes, ksmMetrics.getNumKeyDeletes());
+
+    // Make sure the deleted key has been renamed under the deleting-key
+    // prefix in the KSM metadata store.
+    MetadataStore store = cluster.getKeySpaceManager().
+        getMetadataManager().getStore();
+    List<Map.Entry<byte[], byte[]>> list = store.getRangeKVs(null, 10,
+        new MetadataKeyFilters.KeyPrefixFilter(DELETING_KEY_PREFIX));
+    Assert.assertEquals(1, list.size());
+
+    // Check the block key in SCM, make sure it's deleted.
+    Set<String> keys = new HashSet<>();
+    keys.add(keyArgs.getResourceName());
+    exception.expect(IOException.class);
+    exception.expectMessage("Specified block key does not exist");
+    cluster.getStorageContainerManager().getBlockLocations(keys);
+
+    // Delete the key again to test deleting a non-existing key.
+    exception.expect(IOException.class);
+    exception.expectMessage("KEY_NOT_FOUND");
+    storageHandler.deleteKey(keyArgs);
+    Assert.assertEquals(1 + numKeyDeleteFails,
+        ksmMetrics.getNumKeyDeletesFails());
+  }
+
+  @Test(timeout = 60000)
+  public void testListBuckets() throws IOException, OzoneException {
+    ListBuckets result = null;
+    ListArgs listBucketArgs = null;
+
+    // Create volume - volA.
+    final String volAname = "volA";
+    VolumeArgs volAArgs = new VolumeArgs(volAname, userArgs);
+    volAArgs.setUserName("userA");
+    volAArgs.setAdminName("adminA");
+    storageHandler.createVolume(volAArgs);
+
+    // Create 20 buckets in volA for tests.
+    for (int i=0; i<10; i++) {
+      // Create "/volA/aBucket_0" to "/volA/aBucket_9" buckets in volA volume.
+      BucketArgs aBuckets = new BucketArgs(volAname,
+          "aBucket_" + i, userArgs);
+      if (i % 3 == 0) {
+        aBuckets.setStorageType(StorageType.ARCHIVE);
+      } else {
+        aBuckets.setStorageType(StorageType.DISK);
+      }
+      storageHandler.createBucket(aBuckets);
+
+      // Create "/volA/bBucket_0" to "/volA/bBucket_9" buckets in volA volume.
+      BucketArgs bBuckets = new BucketArgs(volAname,
+          "bBucket_" + i, userArgs);
+      if (i % 3 == 0) {
+        bBuckets.setStorageType(StorageType.RAM_DISK);
+      } else {
+        bBuckets.setStorageType(StorageType.SSD);
+      }
+      storageHandler.createBucket(bBuckets);
+    }
+
+    VolumeArgs volArgs = new VolumeArgs(volAname, userArgs);
+
+    // List all buckets in volA.
+    listBucketArgs = new ListArgs(volArgs, null, 100, null);
+    result = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(20, result.getBuckets().size());
+    List<BucketInfo> archiveBuckets = result.getBuckets().stream()
+        .filter(item -> item.getStorageType() == StorageType.ARCHIVE)
+        .collect(Collectors.toList());
+    Assert.assertEquals(4, archiveBuckets.size());
+
+    // List buckets with prefix "aBucket".
+    listBucketArgs = new ListArgs(volArgs, "aBucket", 100, null);
+    result = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(10, result.getBuckets().size());
+    Assert.assertTrue(result.getBuckets().stream()
+        .allMatch(entry -> entry.getBucketName().startsWith("aBucket")));
+
+    // List a certain number of buckets.
+    listBucketArgs = new ListArgs(volArgs, null, 3, null);
+    result = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(3, result.getBuckets().size());
+    Assert.assertEquals("aBucket_0",
+        result.getBuckets().get(0).getBucketName());
+    Assert.assertEquals("aBucket_1",
+        result.getBuckets().get(1).getBucketName());
+    Assert.assertEquals("aBucket_2",
+        result.getBuckets().get(2).getBucketName());
+
+    // List a certain number of buckets from the startKey.
+    listBucketArgs = new ListArgs(volArgs, null, 2, "bBucket_3");
+    result = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(2, result.getBuckets().size());
+    Assert.assertEquals("bBucket_4",
+        result.getBuckets().get(0).getBucketName());
+    Assert.assertEquals("bBucket_5",
+        result.getBuckets().get(1).getBucketName());
+
+    // Provide an invalid bucket name as start key.
+    listBucketArgs = new ListArgs(volArgs, null, 100, "unknown_bucket_name");
+    ListBuckets buckets = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(buckets.getBuckets().size(), 0);
+
+    // Use all arguments.
+    listBucketArgs = new ListArgs(volArgs, "b", 5, "bBucket_7");
+    result = storageHandler.listBuckets(listBucketArgs);
+    Assert.assertEquals(2, result.getBuckets().size());
+    Assert.assertEquals("bBucket_8",
+        result.getBuckets().get(0).getBucketName());
+    Assert.assertEquals("bBucket_9",
+        result.getBuckets().get(1).getBucketName());
+
+    // Provide an invalid maxKeys argument.
+    try {
+      listBucketArgs = new ListArgs(volArgs, null, -1, null);
+      storageHandler.listBuckets(listBucketArgs);
+      Assert.fail("Expecting an error when the given"
+          + " maxKeys argument is invalid.");
+    } catch (Exception e) {
+      Assert.assertTrue(e.getMessage()
+          .contains(String.format("the value must be in range (0, %d]",
+              OzoneConsts.MAX_LISTBUCKETS_SIZE)));
+    }
+
+    // Provide an invalid volume name.
+    VolumeArgs invalidVolArgs = new VolumeArgs("invalid_name", userArgs);
+    try {
+      listBucketArgs = new ListArgs(invalidVolArgs, null, 100, null);
+      storageHandler.listBuckets(listBucketArgs);
+      Assert.fail("Expecting an error when the given volume name is invalid.");
+    } catch (Exception e) {
+      Assert.assertTrue(e instanceof IOException);
+      Assert.assertTrue(e.getMessage()
+          .contains(Status.VOLUME_NOT_FOUND.name()));
+    }
+  }
+
+  /**
+   * Test list keys.
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testListKeys() throws IOException, OzoneException {
+    ListKeys result = null;
+    ListArgs listKeyArgs = null;
+
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    // Write 20 keys in bucket.
+    int numKeys = 20;
+    String keyName = "Key";
+    KeyArgs keyArgs = null;
+    for (int i = 0; i < numKeys; i++) {
+      if (i % 2 == 0) {
+        // Create /volume/bucket/aKey[0,2,4,...,18] in bucket.
+        keyArgs = new KeyArgs("a" + keyName + i, bucketArgs);
+      } else {
+        // Create /volume/bucket/bKey[1,3,5,...,19] in bucket.
+        keyArgs = new KeyArgs("b" + keyName + i, bucketArgs);
+      }
+      keyArgs.setSize(4096);
+
+      // This only exercises listKeys, so no real data needs to be written.
+      OutputStream stream = storageHandler.newKeyWriter(keyArgs);
+      stream.close();
+    }
+
+    // List all keys in bucket.
+    bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    listKeyArgs = new ListArgs(bucketArgs, null, 100, null);
+    result = storageHandler.listKeys(listKeyArgs);
+    Assert.assertEquals(numKeys, result.getKeyList().size());
+    List<KeyInfo> allKeys = result.getKeyList().stream()
+        .filter(item -> item.getSize() == 4096)
+        .collect(Collectors.toList());
+
+    // List keys with prefix "aKey".
+    listKeyArgs = new ListArgs(bucketArgs, "aKey", 100, null);
+    result = storageHandler.listKeys(listKeyArgs);
+    Assert.assertEquals(numKeys / 2, result.getKeyList().size());
+    Assert.assertTrue(result.getKeyList().stream()
+        .allMatch(entry -> entry.getKeyName().startsWith("aKey")));
+
+    // List a certain number of keys.
+    listKeyArgs = new ListArgs(bucketArgs, null, 3, null);
+    result = storageHandler.listKeys(listKeyArgs);
+    Assert.assertEquals(3, result.getKeyList().size());
+    Assert.assertEquals("aKey0",
+        result.getKeyList().get(0).getKeyName());
+    Assert.assertEquals("aKey10",
+        result.getKeyList().get(1).getKeyName());
+    Assert.assertEquals("aKey12",
+        result.getKeyList().get(2).getKeyName());
+
+    // List a certain number of keys from the startKey.
+    listKeyArgs = new ListArgs(bucketArgs, null, 2, "bKey1");
+    result = storageHandler.listKeys(listKeyArgs);
+    Assert.assertEquals(2, result.getKeyList().size());
+    Assert.assertEquals("bKey11",
+        result.getKeyList().get(0).getKeyName());
+    Assert.assertEquals("bKey13",
+        result.getKeyList().get(1).getKeyName());
+
+    // Provide an invalid key name as start key.
+    listKeyArgs = new ListArgs(bucketArgs, null, 100, "invalid_start_key");
+    ListKeys keys = storageHandler.listKeys(listKeyArgs);
+    Assert.assertEquals(keys.getKeyList().size(), 0);
+
+    // Provide an invalid maxKeys argument.
+    try {
+      listKeyArgs = new ListArgs(bucketArgs, null, -1, null);
+      storageHandler.listBuckets(listKeyArgs);
+      Assert.fail("Expecting an error when the given"
+          + " maxKeys argument is invalid.");
+    } catch (Exception e) {
+      GenericTestUtils.assertExceptionContains(
+          String.format("the value must be in range (0, %d]",
+              OzoneConsts.MAX_LISTKEYS_SIZE), e);
+    }
+
+    // Provide an invalid bucket name.
+    bucketArgs = new BucketArgs("invalid_bucket", createVolumeArgs);
+    try {
+      listKeyArgs = new ListArgs(bucketArgs, null, numKeys, null);
+      storageHandler.listKeys(listKeyArgs);
+      Assert.fail(
+          "Expecting an error when the given bucket name is invalid.");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains(
+          Status.BUCKET_NOT_FOUND.name(), e);
+    }
+  }
+
+  @Test
+  public void testListVolumes() throws IOException, OzoneException {
+
+    String user0 = "testListVolumes-user-0";
+    String user1 = "testListVolumes-user-1";
+    String adminUser = "testListVolumes-admin";
+    ListArgs listVolumeArgs;
+    ListVolumes volumes;
+
+    // Create 10 volumes by user0 and user1
+    String[] user0vols = new String[10];
+    String[] user1vols = new String[10];
+    for (int i =0; i<10; i++) {
+      VolumeArgs createVolumeArgs;
+      String user0VolName = "Vol-" + user0 + "-" + i;
+      user0vols[i] = user0VolName;
+      createVolumeArgs = new VolumeArgs(user0VolName, userArgs);
+      createVolumeArgs.setUserName(user0);
+      createVolumeArgs.setAdminName(adminUser);
+      createVolumeArgs.setQuota(new OzoneQuota(i, OzoneQuota.Units.GB));
+      storageHandler.createVolume(createVolumeArgs);
+
+      String user1VolName = "Vol-" + user1 + "-" + i;
+      user1vols[i] = user1VolName;
+      createVolumeArgs = new VolumeArgs(user1VolName, userArgs);
+      createVolumeArgs.setUserName(user1);
+      createVolumeArgs.setAdminName(adminUser);
+      createVolumeArgs.setQuota(new OzoneQuota(i, OzoneQuota.Units.GB));
+      storageHandler.createVolume(createVolumeArgs);
+    }
+
+    // Test list all volumes
+    UserArgs userArgs0 = new UserArgs(user0, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    listVolumeArgs = new ListArgs(userArgs0, "Vol-testListVolumes", 100, null);
+    listVolumeArgs.setRootScan(true);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(20, volumes.getVolumes().size());
+
+    // Test listing all volumes belonging to a user
+    listVolumeArgs = new ListArgs(userArgs0, null, 100, null);
+    listVolumeArgs.setRootScan(false);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(10, volumes.getVolumes().size());
+
+    // Test prefix
+    listVolumeArgs = new ListArgs(userArgs0,
+        "Vol-" + user0 + "-3", 100, null);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(1, volumes.getVolumes().size());
+    Assert.assertEquals(user0vols[3],
+        volumes.getVolumes().get(0).getVolumeName());
+    Assert.assertEquals(user0,
+        volumes.getVolumes().get(0).getOwner().getName());
+
+    // Test list volumes by user
+    UserArgs userArgs1 = new UserArgs(user1, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    listVolumeArgs = new ListArgs(userArgs1, null, 100, null);
+    listVolumeArgs.setRootScan(false);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(10, volumes.getVolumes().size());
+    Assert.assertEquals(user1,
+        volumes.getVolumes().get(3).getOwner().getName());
+
+    // Make sure all available fields are returned
+    final String user0vol4 = "Vol-" + user0 + "-4";
+    final String user0vol5 = "Vol-" + user0 + "-5";
+    listVolumeArgs = new ListArgs(userArgs0, null, 1, user0vol4);
+    listVolumeArgs.setRootScan(false);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(1, volumes.getVolumes().size());
+    Assert.assertEquals(user0,
+        volumes.getVolumes().get(0).getOwner().getName());
+    Assert.assertEquals(user0vol5,
+        volumes.getVolumes().get(0).getVolumeName());
+    Assert.assertEquals(5,
+        volumes.getVolumes().get(0).getQuota().getSize());
+    Assert.assertEquals(OzoneQuota.Units.GB,
+        volumes.getVolumes().get(0).getQuota().getUnit());
+
+    // A user without any volumes should get an empty result.
+    UserArgs userArgsX = new UserArgs("unknownUser", OzoneUtils.getRequestID(),
+        null, null, null, null);
+    listVolumeArgs = new ListArgs(userArgsX, null, 100, null);
+    listVolumeArgs.setRootScan(false);
+    volumes = storageHandler.listVolumes(listVolumeArgs);
+    Assert.assertEquals(0, volumes.getVolumes().size());
+  }
+
+  /**
+   * Test get key information.
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Test
+  public void testGetKeyInfo() throws IOException,
+      OzoneException, ParseException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    long currentTime = Time.now();
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String keyName = "testKey";
+    KeyArgs keyArgs = new KeyArgs(keyName, bucketArgs);
+    keyArgs.setSize(4096);
+
+
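+    // Commit the key without writing data; the reported size should be 0.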
+    OutputStream stream = storageHandler.newKeyWriter(keyArgs);
+    stream.close();
+
+    KeyInfo keyInfo = storageHandler.getKeyInfo(keyArgs);
+    // Compare timestamps at second granularity, since reparsing the date
+    // string to milliseconds loses precision.
+    Assert.assertTrue((OzoneUtils.formatDate(keyInfo.getCreatedOn())
+        / 1000) >= (currentTime / 1000));
+    Assert.assertTrue((OzoneUtils.formatDate(keyInfo.getModifiedOn())
+        / 1000) >= (currentTime / 1000));
+    Assert.assertEquals(keyName, keyInfo.getKeyName());
+    // Without any data written, the reported size should be 0.
+    Assert.assertEquals(0, keyInfo.getSize());
+  }
+
+  /**
+   * Test that the write can proceed without having to set the right size.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testWriteSize() throws IOException, OzoneException {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String dataString = RandomStringUtils.randomAscii(100);
+    // Write a key without specifying a size at all.
+    String keyName = "testKey";
+    KeyArgs keyArgs = new KeyArgs(keyName, bucketArgs);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    byte[] data = new byte[dataString.length()];
+    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
+      in.read(data);
+    }
+    Assert.assertEquals(dataString, DFSUtil.bytes2String(data));
+
+    // Write a key with a declared size, but write more data than declared.
+    String keyName1 = "testKey1";
+    KeyArgs keyArgs1 = new KeyArgs(keyName1, bucketArgs);
+    keyArgs1.setSize(30);
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs1)) {
+      stream.write(dataString.getBytes());
+    }
+    byte[] data1 = new byte[dataString.length()];
+    try (InputStream in = storageHandler.newKeyReader(keyArgs1)) {
+      in.read(data1);
+    }
+    Assert.assertEquals(dataString, DFSUtil.bytes2String(data1));
+  }
+
+  /**
+   * Tests the RPC call for getting scmId and clusterId from SCM.
+   * @throws IOException
+   */
+  @Test
+  public void testGetScmInfo() throws IOException {
+    ScmInfo info = cluster.getKeySpaceManager().getScmInfo();
+    Assert.assertEquals(clusterId, info.getClusterId());
+    Assert.assertEquals(scmId, info.getScmId());
+  }
+
+
+  @Test
+  public void testExpiredOpenKey() throws Exception {
+    BackgroundService openKeyCleanUpService = ((KeyManagerImpl)cluster
+        .getKeySpaceManager().getKeyManager()).getOpenKeyCleanupService();
+
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    // Open some keys without closing or committing them, so they remain in
+    // the open-key table.
+
+    KeyArgs keyArgs1 = new KeyArgs("testKey1", bucketArgs);
+    KeyArgs keyArgs2 = new KeyArgs("testKey2", bucketArgs);
+    KeyArgs keyArgs3 = new KeyArgs("testKey3", bucketArgs);
+    KeyArgs keyArgs4 = new KeyArgs("testKey4", bucketArgs);
+    List<BlockGroup> openKeys;
+    storageHandler.newKeyWriter(keyArgs1);
+    storageHandler.newKeyWriter(keyArgs2);
+    storageHandler.newKeyWriter(keyArgs3);
+    storageHandler.newKeyWriter(keyArgs4);
+
+    Set<String> expected = Stream.of(
+        "testKey1", "testKey2", "testKey3", "testKey4")
+        .collect(Collectors.toSet());
+
+    // Now all k1-k4 should be in open state, so ExpiredOpenKeys should not
+    // contain these values.
+    openKeys = cluster.getKeySpaceManager()
+        .getMetadataManager().getExpiredOpenKeys();
+
+    for (BlockGroup bg : openKeys) {
+      String[] subs = bg.getGroupID().split("/");
+      String keyName = subs[subs.length - 1];
+      Assert.assertFalse(expected.contains(keyName));
+    }
+
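+    // Wait long enough for the open keys to cross the expiration threshold
+    // (the open-key expiry interval is assumed to be configured short enough
+    // in this test cluster).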
+    Thread.sleep(2000);
+    // Now all k1-k4 should be in ExpiredOpenKeys
+    openKeys = cluster.getKeySpaceManager()
+        .getMetadataManager().getExpiredOpenKeys();
+    for (BlockGroup bg : openKeys) {
+      String[] subs = bg.getGroupID().split("/");
+      String keyName = subs[subs.length - 1];
+      if (expected.contains(keyName)) {
+        expected.remove(keyName);
+      }
+    }
+    Assert.assertEquals(0, expected.size());
+
+    KeyArgs keyArgs5 = new KeyArgs("testKey5", bucketArgs);
+    storageHandler.newKeyWriter(keyArgs5);
+
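+    // Trigger the clean-up task manually instead of waiting for its
+    // scheduled interval.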
+    openKeyCleanUpService.triggerBackgroundTaskForTesting();
+    Thread.sleep(2000);
+    // Now all k1-k4 should have been removed by the clean-up task; only k5
+    // should remain in ExpiredOpenKeys.
+    openKeys =
+        cluster.getKeySpaceManager().getMetadataManager().getExpiredOpenKeys();
+    System.out.println(openKeys);
+    boolean key5found = false;
+    Set<String> removed = Stream.of(
+        "testKey1", "testKey2", "testKey3", "testKey4")
+        .collect(Collectors.toSet());
+    for (BlockGroup bg : openKeys) {
+      String[] subs = bg.getGroupID().split("/");
+      String keyName = subs[subs.length - 1];
+      Assert.assertFalse(removed.contains(keyName));
+      if (keyName.equals("testKey5")) {
+        key5found = true;
+      }
+    }
+    Assert.assertTrue(key5found);
+  }
+
+  /**
+   * Tests the KSM Initialization.
+   * @throws IOException
+   */
+  @Test
+  public void testKSMInitialization() throws IOException {
+    // Read the version info from the KSM version file.
+    KSMStorage ksmStorage = cluster.getKeySpaceManager().getKsmStorage();
+    SCMStorage scmStorage = new SCMStorage(conf);
+    // Asserts that the cluster ID and SCM ID are properly set in the SCM
+    // version file.
+    Assert.assertEquals(clusterId, scmStorage.getClusterID());
+    Assert.assertEquals(scmId, scmStorage.getScmId());
+    // Asserts that the KSM ID is properly set in the KSM version file.
+    Assert.assertEquals(ksmId, ksmStorage.getKsmId());
+    // Asserts that the SCM info is correct in the KSM version file.
+    Assert.assertEquals(clusterId, ksmStorage.getClusterID());
+    Assert.assertEquals(scmId, ksmStorage.getScmId());
+  }
+
+  /**
+   * Tests the KSM Initialization Failure.
+   * @throws IOException
+   */
+  @Test
+  public void testKSMInitializationFailure() throws Exception {
+    OzoneConfiguration config = new OzoneConfiguration();
+    final String path =
+        GenericTestUtils.getTempPath(UUID.randomUUID().toString());
+    Path metaDirPath = Paths.get(path, "ksm-meta");
+    config.set(OzoneConfigKeys.OZONE_METADATA_DIRS, metaDirPath.toString());
+    config.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
+    config.set(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY, "127.0.0.1:0");
+    config.set(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY,
+        conf.get(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY));
+    exception.expect(KSMException.class);
+    exception.expectMessage("KSM not initialized.");
+    KeySpaceManager.createKSM(null, config);
+    KSMStorage ksmStore = new KSMStorage(config);
+    ksmStore.setClusterId("testClusterId");
+    ksmStore.setScmId("testScmId");
+    // writes the version file properties
+    ksmStore.initialize();
+    exception.expect(KSMException.class);
+    exception.expectMessage("SCM version info mismatch.");
+    KeySpaceManager.createKSM(null, conf);
+  }
+
+  @Test
+  public void testGetServiceList() throws IOException {
+    long numGetServiceListCalls = ksmMetrics.getNumGetServiceLists();
+    List<ServiceInfo> services = cluster.getKeySpaceManager().getServiceList();
+
+    Assert.assertEquals(numGetServiceListCalls + 1,
+        ksmMetrics.getNumGetServiceLists());
+
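+    // The service list should contain the KSM itself; its RPC address must
+    // match the configured KSM address.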
+    ServiceInfo ksmInfo = services.stream().filter(
+        a -> a.getNodeType().equals(HddsProtos.NodeType.KSM))
+        .collect(Collectors.toList()).get(0);
+    InetSocketAddress ksmAddress = new InetSocketAddress(ksmInfo.getHostname(),
+        ksmInfo.getPort(ServicePort.Type.RPC));
+    Assert.assertEquals(NetUtils.createSocketAddr(
+        conf.get(OZONE_KSM_ADDRESS_KEY)), ksmAddress);
+
+    ServiceInfo scmInfo = services.stream().filter(
+        a -> a.getNodeType().equals(HddsProtos.NodeType.SCM))
+        .collect(Collectors.toList()).get(0);
+    InetSocketAddress scmAddress = new InetSocketAddress(scmInfo.getHostname(),
+        scmInfo.getPort(ServicePort.Type.RPC));
+    Assert.assertEquals(NetUtils.createSocketAddr(
+        conf.get(OZONE_SCM_CLIENT_ADDRESS_KEY)), scmAddress);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
new file mode 100644
index 0000000..2fb70f9
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServicePort;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.net.InetSocketAddress;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
+import static org.apache.hadoop.ozone.KsmUtils.getKsmAddressForClients;
+
+/**
+ * This class is to test the REST interface exposed by KeySpaceManager.
+ */
+public class TestKeySpaceManagerRestInterface {
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testGetServiceList() throws Exception {
+    KeySpaceManagerHttpServer server =
+        cluster.getKeySpaceManager().getHttpServer();
+    HttpClient client = HttpClients.createDefault();
+    String connectionUri = "http://" +
+        NetUtils.getHostPortString(server.getHttpAddress());
+    HttpGet httpGet = new HttpGet(connectionUri + "/serviceList");
+    HttpResponse response = client.execute(httpGet);
+    String serviceListJson = EntityUtils.toString(response.getEntity());
+
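+    // The /serviceList endpoint returns a JSON array of ServiceInfo objects;
+    // deserialize it and index the entries by node type.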
+    ObjectMapper objectMapper = new ObjectMapper();
+    TypeReference<List<ServiceInfo>> serviceInfoReference =
+        new TypeReference<List<ServiceInfo>>() {};
+    List<ServiceInfo> serviceInfos = objectMapper.readValue(
+        serviceListJson, serviceInfoReference);
+    Map<HddsProtos.NodeType, ServiceInfo> serviceMap = new HashMap<>();
+    for (ServiceInfo serviceInfo : serviceInfos) {
+      serviceMap.put(serviceInfo.getNodeType(), serviceInfo);
+    }
+
+    InetSocketAddress ksmAddress =
+        getKsmAddressForClients(conf);
+    ServiceInfo ksmInfo = serviceMap.get(HddsProtos.NodeType.KSM);
+
+    Assert.assertEquals(ksmAddress.getHostName(), ksmInfo.getHostname());
+    Assert.assertEquals(ksmAddress.getPort(),
+        ksmInfo.getPort(ServicePort.Type.RPC));
+    Assert.assertEquals(server.getHttpAddress().getPort(),
+        ksmInfo.getPort(ServicePort.Type.HTTP));
+
+    InetSocketAddress scmAddress =
+        getScmAddressForClients(conf);
+    ServiceInfo scmInfo = serviceMap.get(HddsProtos.NodeType.SCM);
+
+    Assert.assertEquals(scmAddress.getHostName(), scmInfo.getHostname());
+    Assert.assertEquals(scmAddress.getPort(),
+        scmInfo.getPort(ServicePort.Type.RPC));
+
+    ServiceInfo datanodeInfo = serviceMap.get(HddsProtos.NodeType.DATANODE);
+    DatanodeDetails datanodeDetails = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails();
+    Assert.assertEquals(datanodeDetails.getHostName(),
+        datanodeInfo.getHostname());
+
+    Map<ServicePort.Type, Integer> ports = datanodeInfo.getPorts();
+    for(ServicePort.Type type : ports.keySet()) {
+      switch (type) {
+      case HTTP:
+      case HTTPS:
+        Assert.assertEquals(datanodeDetails.getOzoneRestPort(),
+            (int) ports.get(type));
+        break;
+      default:
+        // KSM only sends the Datanode's info port details, i.e. HTTP or
+        // HTTPS. Other port types are not expected as of now.
+        Assert.fail();
+        break;
+      }
+    }
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKsmBlockVersioning.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKsmBlockVersioning.java
new file mode 100644
index 0000000..34bbaf6
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKsmBlockVersioning.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * This class tests the versioning of blocks from the KSM side.
+ */
+public class TestKsmBlockVersioning {
+  private static MiniOzoneCluster cluster = null;
+  private static UserArgs userArgs;
+  private static OzoneConfiguration conf;
+  private static KeySpaceManager keySpaceManager;
+  private static StorageHandler storageHandler;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+    userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    keySpaceManager = cluster.getKeySpaceManager();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testAllocateCommit() throws Exception {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .setDataSize(1000)
+        .build();
+
+    // 1st update, version 0
+    OpenKeySession openKey = keySpaceManager.openKey(keyArgs);
+    keySpaceManager.commitKey(keyArgs, openKey.getId());
+
+    KsmKeyInfo keyInfo = keySpaceManager.lookupKey(keyArgs);
+    KsmKeyLocationInfoGroup highestVersion =
+        checkVersions(keyInfo.getKeyLocationVersions());
+    assertEquals(0, highestVersion.getVersion());
+    assertEquals(1, highestVersion.getLocationList().size());
+
+    // 2nd update, version 1
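+    // Committing without an explicit allocateBlock still adds one block,
+    // since openKey pre-allocates a block for the requested data size
+    // (hence the assertions below expect one extra location).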
+    openKey = keySpaceManager.openKey(keyArgs);
+    //KsmKeyLocationInfo locationInfo =
+    //    keySpaceManager.allocateBlock(keyArgs, openKey.getId());
+    keySpaceManager.commitKey(keyArgs, openKey.getId());
+
+    keyInfo = keySpaceManager.lookupKey(keyArgs);
+    highestVersion = checkVersions(keyInfo.getKeyLocationVersions());
+    assertEquals(1, highestVersion.getVersion());
+    assertEquals(2, highestVersion.getLocationList().size());
+
+    // 3rd update, version 2
+    openKey = keySpaceManager.openKey(keyArgs);
+    // This block will be appended to the latest version, i.e. version 2.
+    keySpaceManager.allocateBlock(keyArgs, openKey.getId());
+    keySpaceManager.commitKey(keyArgs, openKey.getId());
+
+    keyInfo = keySpaceManager.lookupKey(keyArgs);
+    highestVersion = checkVersions(keyInfo.getKeyLocationVersions());
+    assertEquals(2, highestVersion.getVersion());
+    assertEquals(4, highestVersion.getLocationList().size());
+  }
+
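+  /**
+   * Walks the versions in order, checking that version numbers increase by
+   * one and that every block of a version is carried over into the next
+   * version, and returns the latest version group.
+   */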
+  private KsmKeyLocationInfoGroup checkVersions(
+      List<KsmKeyLocationInfoGroup> versions) {
+    KsmKeyLocationInfoGroup currentVersion = null;
+    for (KsmKeyLocationInfoGroup version : versions) {
+      if (currentVersion != null) {
+        assertEquals(currentVersion.getVersion() + 1, version.getVersion());
+        for (KsmKeyLocationInfo info : currentVersion.getLocationList()) {
+          boolean found = false;
+          // All blocks from the previous version must be present in the
+          // next version.
+          for (KsmKeyLocationInfo info2 : version.getLocationList()) {
+            if (info.getBlockID().equals(info2.getBlockID())) {
+              found = true;
+              break;
+            }
+          }
+          assertTrue(found);
+        }
+      }
+      currentVersion = version;
+    }
+    return currentVersion;
+  }
+
+  @Test
+  public void testReadLatestVersion() throws Exception {
+
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(volumeName)
+        .setBucketName(bucketName)
+        .setKeyName(keyName)
+        .setDataSize(1000)
+        .build();
+
+    String dataString = RandomStringUtils.randomAlphabetic(100);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    // This write creates the 1st version, with one block.
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    byte[] data = new byte[dataString.length()];
+    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
+      in.read(data);
+    }
+    KsmKeyInfo keyInfo = keySpaceManager.lookupKey(ksmKeyArgs);
+    assertEquals(dataString, DFSUtil.bytes2String(data));
+    assertEquals(0, keyInfo.getLatestVersionLocations().getVersion());
+    assertEquals(1,
+        keyInfo.getLatestVersionLocations().getLocationList().size());
+
+    // This write creates the 2nd version, which contains the block from
+    // version 1 plus one new block.
+    dataString = RandomStringUtils.randomAlphabetic(10);
+    data = new byte[dataString.length()];
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
+      in.read(data);
+    }
+    keyInfo = keySpaceManager.lookupKey(ksmKeyArgs);
+    assertEquals(dataString, DFSUtil.bytes2String(data));
+    assertEquals(1, keyInfo.getLatestVersionLocations().getVersion());
+    assertEquals(2,
+        keyInfo.getLatestVersionLocations().getLocationList().size());
+
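+    // A third write creates version 2, which carries the two earlier blocks
+    // plus one new block, giving three locations in total.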
+    dataString = RandomStringUtils.randomAlphabetic(200);
+    data = new byte[dataString.length()];
+    try (OutputStream stream = storageHandler.newKeyWriter(keyArgs)) {
+      stream.write(dataString.getBytes());
+    }
+    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
+      in.read(data);
+    }
+    keyInfo = keySpaceManager.lookupKey(ksmKeyArgs);
+    assertEquals(dataString, DFSUtil.bytes2String(data));
+    assertEquals(2, keyInfo.getLatestVersionLocations().getVersion());
+    assertEquals(3,
+        keyInfo.getLatestVersionLocations().getLocationList().size());
+  }
+}
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestMultipleContainerReadWrite.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestMultipleContainerReadWrite.java
new file mode 100644
index 0000000..765ec3f
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestMultipleContainerReadWrite.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.LinkedList;
+
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test key write/read where a key can span multiple containers.
+ */
+public class TestMultipleContainerReadWrite {
+  private static MiniOzoneCluster cluster = null;
+  private static StorageHandler storageHandler;
+  private static UserArgs userArgs;
+  private static OzoneConfiguration conf;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    // Use a 1 MB block size so that a few-MB key spans multiple blocks.
+    conf.setLong(OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_IN_MB, 1);
+    conf.setInt(ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE, 5);
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+    userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testWriteRead() throws Exception {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String dataString = RandomStringUtils.randomAscii(3 * (int)OzoneConsts.MB);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(3 * (int)OzoneConsts.MB);
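+    // With the 1 MB block size configured above, this 3 MB key should be
+    // split into three chunks, which the container metrics below verify.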
+
+    try (OutputStream outputStream = storageHandler.newKeyWriter(keyArgs)) {
+      outputStream.write(dataString.getBytes());
+    }
+
+    byte[] data = new byte[dataString.length()];
+    try (InputStream inputStream = storageHandler.newKeyReader(keyArgs)) {
+      inputStream.read(data, 0, data.length);
+    }
+    assertEquals(dataString, new String(data));
+    // Check the container metrics to verify that the chunks were persisted
+    // and read back.
+    MetricsRecordBuilder containerMetrics = getMetrics(
+        "StorageContainerMetrics");
+    assertCounter("numWriteChunk", 3L, containerMetrics);
+    assertCounter("numReadChunk", 3L, containerMetrics);
+  }
+
+  // This test is disabled because it assumes that writing beyond a declared
+  // size is not allowed, which is not enforced at the moment. It is kept in
+  // case we add that restriction in the future.
+  @Ignore
+  @Test
+  public void testErrorWrite() throws Exception {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String dataString1 = RandomStringUtils.randomAscii(100);
+    String dataString2 = RandomStringUtils.randomAscii(500);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(500);
+
+    try (OutputStream outputStream = storageHandler.newKeyWriter(keyArgs)) {
+      // The first write will succeed.
+      outputStream.write(dataString1.getBytes());
+      // The second write of 500 bytes exceeds the remaining 400 bytes and
+      // should fail.
+      exception.expect(IOException.class);
+      exception.expectMessage(
+          "Can not write 500 bytes with only 400 byte space");
+      outputStream.write(dataString2.getBytes());
+    }
+  }
+
+  @Test
+  public void testPartialRead() throws Exception {
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    VolumeArgs createVolumeArgs = new VolumeArgs(volumeName, userArgs);
+    createVolumeArgs.setUserName(userName);
+    createVolumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(createVolumeArgs);
+
+    BucketArgs bucketArgs = new BucketArgs(bucketName, createVolumeArgs);
+    bucketArgs.setAddAcls(new LinkedList<>());
+    bucketArgs.setRemoveAcls(new LinkedList<>());
+    bucketArgs.setStorageType(StorageType.DISK);
+    storageHandler.createBucket(bucketArgs);
+
+    String dataString = RandomStringUtils.randomAscii(500);
+    KeyArgs keyArgs = new KeyArgs(volumeName, bucketName, keyName, userArgs);
+    keyArgs.setSize(500);
+
+    try (OutputStream outputStream = storageHandler.newKeyWriter(keyArgs)) {
+      outputStream.write(dataString.getBytes());
+    }
+
+    byte[] data = new byte[600];
+    try (InputStream inputStream = storageHandler.newKeyReader(keyArgs)) {
+      int readLen = inputStream.read(data, 0, 340);
+      assertEquals(340, readLen);
+      assertEquals(dataString.substring(0, 340),
+          new String(data).substring(0, 340));
+
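+      // Only 160 of the requested 260 bytes remain, since 500 bytes were
+      // written in total.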
+      readLen = inputStream.read(data, 340, 260);
+      assertEquals(160, readLen);
+      assertEquals(dataString, new String(data).substring(0, 500));
+
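+      // The key has been fully consumed, so the next read returns -1 (EOF).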
+      readLen = inputStream.read(data, 500, 1);
+      assertEquals(-1, readLen);
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
new file mode 100644
index 0000000..31046f5
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
@@ -0,0 +1,800 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ozShell;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+import java.util.UUID;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.OzoneAcl.OzoneACLRights;
+import org.apache.hadoop.ozone.OzoneAcl.OzoneACLType;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneKey;
+import org.apache.hadoop.ozone.web.client.OzoneRestClient;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+/**
+ * This test class is for testing the Ozone shell commands.
+ */
+public class TestOzoneShell {
+
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static String url;
+  private static File baseDir;
+  private static OzoneConfiguration conf = null;
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneRestClient client = null;
+  private static Shell shell = null;
+
+  private final ByteArrayOutputStream out = new ByteArrayOutputStream();
+  private final ByteArrayOutputStream err = new ByteArrayOutputStream();
+  private static final PrintStream OLD_OUT = System.out;
+  private static final PrintStream OLD_ERR = System.err;
+
+  /**
+   * Create a MiniOzoneCluster for testing, using the distributed Ozone
+   * handler type.
+   *
+   * @throws Exception
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+
+    String path = GenericTestUtils.getTempPath(
+        TestOzoneShell.class.getSimpleName());
+    baseDir = new File(path);
+    baseDir.mkdirs();
+
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+    conf.setQuietMode(false);
+    shell = new Shell();
+    shell.setConf(conf);
+
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    final int port = cluster.getHddsDatanodes().get(0).getDatanodeDetails()
+        .getOzoneRestPort();
+    url = String.format("http://localhost:%d", port);
+    client = new OzoneRestClient(url);
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+
+    if (baseDir != null) {
+      FileUtil.fullyDelete(baseDir, true);
+    }
+  }
+
+  @Before
+  public void setup() {
+    System.setOut(new PrintStream(out));
+    System.setErr(new PrintStream(err));
+  }
+
+  @After
+  public void reset() {
+    // reset stream after each unit test
+    out.reset();
+    err.reset();
+
+    // restore system streams
+    System.setOut(OLD_OUT);
+    System.setErr(OLD_ERR);
+  }
+
+  @Test
+  public void testCreateVolume() throws Exception {
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String userName = "bilbo";
+    String[] args = new String[] {"-createVolume", url + "/" + volumeName,
+        "-user", userName, "-root"};
+
+    assertEquals(0, ToolRunner.run(shell, args));
+    OzoneVolume volumeInfo = client.getVolume(volumeName);
+    assertEquals(volumeName, volumeInfo.getVolumeName());
+    assertEquals(userName, volumeInfo.getOwnerName());
+  }
+
+  @Test
+  public void testDeleteVolume() throws Exception {
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(vol);
+
+    String[] args = new String[] {"-deleteVolume", url + "/" + volumeName,
+        "-root"};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    // Verify that the volume has been deleted.
+    try {
+      client.getVolume(volumeName);
+      fail("Get volume call should have thrown.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          "Info Volume failed, error:VOLUME_NOT_FOUND", e);
+    }
+  }
+
+  @Test
+  public void testInfoVolume() throws Exception {
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    client.createVolume(volumeName, "bilbo", "100TB");
+
+    String[] args = new String[] {"-infoVolume", url + "/" + volumeName,
+        "-root"};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    String output = out.toString();
+    assertTrue(output.contains(volumeName));
+    assertTrue(output.contains("createdOn")
+        && output.contains(OzoneConsts.OZONE_TIME_ZONE));
+
+    // Get info for a non-existent volume.
+    args = new String[] {"-infoVolume", url + "/invalid-volume", "-root"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Info Volume failed, error:VOLUME_NOT_FOUND"));
+  }
+
+  @Test
+  public void testUpdateVolume() throws Exception {
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String userName = "bilbo";
+    OzoneVolume vol = client.createVolume(volumeName, userName, "100TB");
+    assertEquals(userName, vol.getOwnerName());
+    assertEquals(100, vol.getQuota().getSize());
+    assertEquals(OzoneQuota.Units.TB, vol.getQuota().getUnit());
+
+    String[] args = new String[] {"-updateVolume", url + "/" + volumeName,
+        "-quota", "500MB", "-root"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    vol = client.getVolume(volumeName);
+    assertEquals(userName, vol.getOwnerName());
+    assertEquals(500, vol.getQuota().getSize());
+    assertEquals(OzoneQuota.Units.MB, vol.getQuota().getUnit());
+
+    String newUser = "new-user";
+    args = new String[] {"-updateVolume", url + "/" + volumeName,
+        "-user", newUser, "-root"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    vol = client.getVolume(volumeName);
+    assertEquals(newUser, vol.getOwnerName());
+
+    // test error conditions
+    args = new String[] {"-updateVolume", url + "/invalid-volume",
+        "-user", newUser, "-root"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Volume owner change failed, error:VOLUME_NOT_FOUND"));
+
+    err.reset();
+    args = new String[] {"-updateVolume", url + "/invalid-volume",
+        "-quota", "500MB", "-root"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Volume quota change failed, error:VOLUME_NOT_FOUND"));
+  }
+
+  @Test
+  public void testListVolume() throws Exception {
+    String commandOutput;
+    List<VolumeInfo> volumes;
+    final int volCount = 20;
+    final String user1 = "test-user-a";
+    final String user2 = "test-user-b";
+
+    // Create 20 volumes, 10 for user1 and another 10 for user2.
+    for (int x = 0; x < volCount; x++) {
+      String volumeName;
+      String userName;
+
+      if (x % 2 == 0) {
+        // create volume [test-vol0, test-vol2, ..., test-vol18] for user1
+        userName = user1;
+        volumeName = "test-vol" + x;
+      } else {
+        // create volume [test-vol1, test-vol3, ..., test-vol19] for user2
+        userName = user2;
+        volumeName = "test-vol" + x;
+      }
+      OzoneVolume vol = client.createVolume(volumeName, userName, "100TB");
+      assertNotNull(vol);
+    }
+
+    // test -length option
+    String[] args = new String[] {"-listVolume", url + "/", "-user",
+        user1, "-length", "100"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    volumes = (List<VolumeInfo>) JsonUtils
+        .toJsonList(commandOutput, VolumeInfo.class);
+
+    assertEquals(10, volumes.size());
+    for (VolumeInfo volume : volumes) {
+      assertEquals(volume.getOwner().getName(), user1);
+      assertTrue(volume.getCreatedOn().contains(OzoneConsts.OZONE_TIME_ZONE));
+    }
+
+    out.reset();
+    args = new String[] {"-listVolume", url + "/", "-user",
+        user1, "-length", "2"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    volumes = (List<VolumeInfo>) JsonUtils
+        .toJsonList(commandOutput, VolumeInfo.class);
+
+    assertEquals(2, volumes.size());
+
+    // test -prefix option
+    out.reset();
+    args = new String[] {"-listVolume", url + "/", "-user",
+        user1, "-length", "100", "-prefix", "test-vol1"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    volumes = (List<VolumeInfo>) JsonUtils
+        .toJsonList(commandOutput, VolumeInfo.class);
+
+    assertEquals(5, volumes.size());
+    // Returned volume names should be [test-vol10, test-vol12, ..., test-vol18].
+    for (int i = 0; i < volumes.size(); i++) {
+      assertEquals(volumes.get(i).getVolumeName(), "test-vol" + ((i + 5) * 2));
+      assertEquals(volumes.get(i).getOwner().getName(), user1);
+    }
+
+    // test -start option
+    out.reset();
+    args = new String[] {"-listVolume", url + "/", "-user",
+        user2, "-length", "100", "-start", "test-vol15"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    volumes = (List<VolumeInfo>) JsonUtils
+        .toJsonList(commandOutput, VolumeInfo.class);
+
+    assertEquals(2, volumes.size());
+
+    assertEquals(volumes.get(0).getVolumeName(), "test-vol17");
+    assertEquals(volumes.get(1).getVolumeName(), "test-vol19");
+    assertEquals(volumes.get(0).getOwner().getName(), user2);
+    assertEquals(volumes.get(1).getOwner().getName(), user2);
+
+    // test error conditions
+    err.reset();
+    args  = new String[] {"-listVolume", url + "/", "-user",
+        user2, "-length", "-1"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "the vaule should be a positive number"));
+
+    err.reset();
+    args  = new String[] {"-listVolume", url + "/", "-user",
+        user2, "-length", "invalid-length"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "the vaule should be digital"));
+  }
+
+  @Test
+  public void testCreateBucket() throws Exception {
+    OzoneVolume vol = creatVolume();
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    String[] args = new String[] {"-createBucket",
+        url + "/" + vol.getVolumeName() + "/" + bucketName};
+
+    assertEquals(0, ToolRunner.run(shell, args));
+    OzoneBucket bucketInfo = vol.getBucket(bucketName);
+    assertEquals(vol.getVolumeName(),
+        bucketInfo.getBucketInfo().getVolumeName());
+    assertEquals(bucketName, bucketInfo.getBucketName());
+
+    // Test creating a bucket in a non-existent volume.
+    args = new String[] {"-createBucket",
+        url + "/invalid-volume/" + bucketName};
+
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Info Volume failed, error:VOLUME_NOT_FOUND"));
+  }
+
+  @Test
+  public void testDeleteBucket() throws Exception {
+    OzoneVolume vol = creatVolume();
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    OzoneBucket bucketInfo = vol.createBucket(bucketName);
+    assertNotNull(bucketInfo);
+
+    String[] args = new String[] {"-deleteBucket",
+        url + "/" + vol.getVolumeName() + "/" + bucketName};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    // Verify that the bucket has been deleted from the volume.
+    try {
+      vol.getBucket(bucketName);
+      fail("Get bucket should have thrown.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          "Info Bucket failed, error: BUCKET_NOT_FOUND", e);
+    }
+
+    // Test deleting a bucket in a non-existent volume.
+    args = new String[] {"-deleteBucket",
+        url + "/invalid-volume" + "/" + bucketName};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Info Volume failed, error:VOLUME_NOT_FOUND"));
+
+    err.reset();
+    // Test deleting a non-existent bucket.
+    args = new String[] {"-deleteBucket",
+        url + "/" + vol.getVolumeName() + "/invalid-bucket"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Delete Bucket failed, error:BUCKET_NOT_FOUND"));
+  }
+
+  @Test
+  public void testInfoBucket() throws Exception {
+    OzoneVolume vol = creatVolume();
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    vol.createBucket(bucketName);
+
+    String[] args = new String[] {"-infoBucket",
+        url + "/" + vol.getVolumeName() + "/" + bucketName};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    String output = out.toString();
+    assertTrue(output.contains(bucketName));
+    assertTrue(output.contains("createdOn")
+        && output.contains(OzoneConsts.OZONE_TIME_ZONE));
+
+    // Test getting info from a non-existent bucket.
+    args = new String[] {"-infoBucket",
+        url + "/" + vol.getVolumeName() + "/invalid-bucket" + bucketName};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Info Bucket failed, error: BUCKET_NOT_FOUND"));
+  }
+
+  @Test
+  public void testUpdateBucket() throws Exception {
+    OzoneVolume vol = creatVolume();
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    OzoneBucket bucket = vol.createBucket(bucketName);
+    assertEquals(0, bucket.getAcls().size());
+
+    String[] args = new String[] {"-updateBucket",
+        url + "/" + vol.getVolumeName() + "/" + bucketName, "-addAcl",
+        "user:frodo:rw,group:samwise:r"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    String output = out.toString();
+    assertTrue(output.contains("createdOn")
+        && output.contains(OzoneConsts.OZONE_TIME_ZONE));
+
+    bucket = vol.getBucket(bucketName);
+    assertEquals(2, bucket.getAcls().size());
+
+    OzoneAcl acl = bucket.getAcls().get(0);
+    assertTrue(acl.getName().equals("frodo")
+        && acl.getType() == OzoneACLType.USER
+        && acl.getRights()== OzoneACLRights.READ_WRITE);
+
+    args = new String[] {"-updateBucket",
+        url + "/" + vol.getVolumeName() + "/" + bucketName, "-removeAcl",
+        "user:frodo:rw"};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    bucket = vol.getBucket(bucketName);
+    acl = bucket.getAcls().get(0);
+    assertEquals(1, bucket.getAcls().size());
+    assertTrue(acl.getName().equals("samwise")
+        && acl.getType() == OzoneACLType.GROUP
+        && acl.getRights()== OzoneACLRights.READ);
+
+    // Test updating a non-existent bucket.
+    args = new String[] {"-updateBucket",
+        url + "/" + vol.getVolumeName() + "/invalid-bucket", "-addAcl",
+        "user:frodo:rw"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Setting bucket property failed, error: BUCKET_NOT_FOUND"));
+  }
+
+  @Test
+  public void testListBucket() throws Exception {
+    List<BucketInfo> buckets;
+    String commandOutput;
+    int bucketCount = 11;
+    OzoneVolume vol = creatVolume();
+
+    List<String> bucketNames = new ArrayList<>();
+    // Create buckets test-bucket0 through test-bucket10.
+    for (int i = 0; i < bucketCount; i++) {
+      String name = "test-bucket" + i;
+      bucketNames.add(name);
+      OzoneBucket bucket = vol.createBucket(name);
+      assertNotNull(bucket);
+    }
+
+    // test -length option
+    String[] args = new String[] {"-listBucket",
+        url + "/" + vol.getVolumeName(), "-length", "100"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
+        BucketInfo.class);
+
+    assertEquals(11, buckets.size());
+    // Sort bucket names since the returned buckets aren't in creation order.
+    Collections.sort(bucketNames);
+    // Returned bucket names should be [test-bucket0, test-bucket1,
+    // test-bucket10, test-bucket2, ..., test-bucket9].
+    for (int i = 0; i < buckets.size(); i++) {
+      assertEquals(buckets.get(i).getBucketName(), bucketNames.get(i));
+      assertEquals(buckets.get(i).getVolumeName(), vol.getVolumeName());
+      assertTrue(buckets.get(i).getCreatedOn()
+          .contains(OzoneConsts.OZONE_TIME_ZONE));
+    }
+
+    out.reset();
+    args = new String[] {"-listBucket", url + "/" + vol.getVolumeName(),
+        "-length", "3"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
+        BucketInfo.class);
+
+    assertEquals(3, buckets.size());
+    // Returned bucket names should be [test-bucket0,
+    // test-bucket1, test-bucket10].
+    assertEquals(buckets.get(0).getBucketName(), "test-bucket0");
+    assertEquals(buckets.get(1).getBucketName(), "test-bucket1");
+    assertEquals(buckets.get(2).getBucketName(), "test-bucket10");
+
+    // test -prefix option
+    out.reset();
+    args = new String[] {"-listBucket", url + "/" + vol.getVolumeName(),
+        "-length", "100", "-prefix", "test-bucket1"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
+        BucketInfo.class);
+
+    assertEquals(2, buckets.size());
+    // Returned bucket names should be [test-bucket1, test-bucket10].
+    assertEquals(buckets.get(0).getBucketName(), "test-bucket1");
+    assertEquals(buckets.get(1).getBucketName(), "test-bucket10");
+
+    // test -start option
+    out.reset();
+    args = new String[] {"-listBucket", url + "/" + vol.getVolumeName(),
+        "-length", "100", "-start", "test-bucket7"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput,
+        BucketInfo.class);
+
+    assertEquals(2, buckets.size());
+    assertEquals(buckets.get(0).getBucketName(), "test-bucket8");
+    assertEquals(buckets.get(1).getBucketName(), "test-bucket9");
+
+    // test error conditions
+    err.reset();
+    args = new String[] {"-listBucket", url + "/" + vol.getVolumeName(),
+        "-length", "-1"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "the vaule should be a positive number"));
+  }
+
+  @Test
+  public void testPutKey() throws Exception {
+    OzoneBucket bucket = creatBucket();
+    String volumeName = bucket.getBucketInfo().getVolumeName();
+    String bucketName = bucket.getBucketName();
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+
+    String[] args = new String[] {"-putKey",
+        url + "/" + volumeName + "/" + bucketName + "/" + keyName, "-file",
+        createTmpFile()};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    OzoneKey keyInfo = bucket.getKeyInfo(keyName);
+    assertEquals(keyName, keyInfo.getObjectInfo().getKeyName());
+
+    // Test putting a key in a non-existent bucket.
+    args = new String[] {"-putKey",
+        url + "/" + volumeName + "/invalid-bucket/" + keyName, "-file",
+        createTmpFile()};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Create key failed, error:BUCKET_NOT_FOUND"));
+  }
+
+  @Test
+  public void testGetKey() throws Exception {
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    OzoneBucket bucket = creatBucket();
+    String volumeName = bucket.getBucketInfo().getVolumeName();
+    String bucketName = bucket.getBucketName();
+
+    String dataStr = "test-data";
+    bucket.putKey(keyName, dataStr);
+
+    String tmpPath = baseDir.getAbsolutePath() + "/testfile-"
+        + UUID.randomUUID().toString();
+    String[] args = new String[] {"-getKey",
+        url + "/" + volumeName + "/" + bucketName + "/" + keyName, "-file",
+        tmpPath};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    byte[] dataBytes = new byte[dataStr.length()];
+    try (FileInputStream randFile = new FileInputStream(new File(tmpPath))) {
+      randFile.read(dataBytes);
+    }
+    assertEquals(dataStr, DFSUtil.bytes2String(dataBytes));
+  }
+
+  @Test
+  public void testDeleteKey() throws Exception {
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    OzoneBucket bucket = creatBucket();
+    String volumeName = bucket.getBucketInfo().getVolumeName();
+    String bucketName = bucket.getBucketName();
+    bucket.putKey(keyName, "test-data");
+
+    OzoneKey keyInfo = bucket.getKeyInfo(keyName);
+    assertEquals(keyName, keyInfo.getObjectInfo().getKeyName());
+
+    String[] args = new String[] {"-deleteKey",
+        url + "/" + volumeName + "/" + bucketName + "/" + keyName};
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    // Verify that the key has been deleted from the bucket.
+    try {
+      bucket.getKeyInfo(keyName);
+      fail("Get key should have thrown.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          "Lookup key failed, error:KEY_NOT_FOUND", e);
+    }
+
+    // Test deleting a key in a non-existent bucket.
+    args = new String[] {"-deleteKey",
+        url + "/" + volumeName + "/invalid-bucket/" + keyName};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Info Bucket failed, error: BUCKET_NOT_FOUND"));
+
+    err.reset();
+    // Test deleting a non-existent key in the bucket.
+    args = new String[] {"-deleteKey",
+        url + "/" + volumeName + "/" + bucketName + "/invalid-key"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Delete key failed, error:KEY_NOT_FOUND"));
+  }
+
+  @Test
+  public void testInfoKey() throws Exception {
+    String keyName = "key" + RandomStringUtils.randomNumeric(5);
+    OzoneBucket bucket = creatBucket();
+    String volumeName = bucket.getBucketInfo().getVolumeName();
+    String bucketName = bucket.getBucketName();
+    bucket.putKey(keyName, "test-data");
+
+    String[] args = new String[] {"-infoKey",
+        url + "/" + volumeName + "/" + bucketName + "/" + keyName};
+
+    // verify the response output
+    assertEquals(0, ToolRunner.run(shell, args));
+
+    String output = out.toString();
+    assertTrue(output.contains(keyName));
+    assertTrue(output.contains("createdOn") && output.contains("modifiedOn")
+        && output.contains(OzoneConsts.OZONE_TIME_ZONE));
+
+    // reset stream
+    out.reset();
+    err.reset();
+
+    // Get the info of a non-existent key.
+    args = new String[] {"-infoKey",
+        url + "/" + volumeName + "/" + bucketName + "/invalid-key"};
+
+    // Verify the response output: getting info for a non-existent key
+    // should fail.
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "Lookup key failed, error:KEY_NOT_FOUND"));
+  }
+
+  @Test
+  public void testListKey() throws Exception {
+    String commandOutput;
+    List<KeyInfo> keys;
+    int keyCount = 11;
+    OzoneBucket bucket = creatBucket();
+    String volumeName = bucket.getBucketInfo().getVolumeName();
+    String bucketName = bucket.getBucketName();
+
+    String keyName;
+    List<String> keyNames = new ArrayList<>();
+    for (int i = 0; i < keyCount; i++) {
+      keyName = "test-key" + i;
+      keyNames.add(keyName);
+      bucket.putKey(keyName, "test-data" + i);
+    }
+
+    // test -length option
+    String[] args = new String[] {"-listKey",
+        url + "/" + volumeName + "/" + bucketName, "-length", "100"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
+        KeyInfo.class);
+
+    assertEquals(11, keys.size());
+    // sort key names since the returned keys are not in creation order
+    Collections.sort(keyNames);
+    // returned key names should be [test-key0, test-key1,
+    // test-key10, test-key2, ..., test-key9]
+    for (int i = 0; i < keys.size(); i++) {
+      assertEquals(keys.get(i).getKeyName(), keyNames.get(i));
+      // verify the creation/modification time of key
+      assertTrue(keys.get(i).getCreatedOn()
+          .contains(OzoneConsts.OZONE_TIME_ZONE));
+      assertTrue(keys.get(i).getModifiedOn()
+          .contains(OzoneConsts.OZONE_TIME_ZONE));
+    }
+
+    out.reset();
+    args = new String[] {"-listKey", url + "/" + volumeName + "/" + bucketName,
+        "-length", "3"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
+        KeyInfo.class);
+
+    assertEquals(3, keys.size());
+    // returned key names should be [test-key0, test-key1, test-key10]
+    assertEquals(keys.get(0).getKeyName(), "test-key0");
+    assertEquals(keys.get(1).getKeyName(), "test-key1");
+    assertEquals(keys.get(2).getKeyName(), "test-key10");
+
+    // test -prefix option
+    out.reset();
+    args = new String[] {"-listKey", url + "/" + volumeName + "/" + bucketName,
+        "-length", "100", "-prefix", "test-key1"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
+        KeyInfo.class);
+
+    assertEquals(2, keys.size());
+    // returned key names should be [test-key1, test-key10]
+    assertEquals(keys.get(0).getKeyName(), "test-key1");
+    assertEquals(keys.get(1).getKeyName(), "test-key10");
+
+    // test -start option
+    out.reset();
+    args = new String[] {"-listKey", url + "/" + volumeName + "/" + bucketName,
+        "-length", "100", "-start", "test-key7"};
+    assertEquals(0, ToolRunner.run(shell, args));
+    commandOutput = out.toString();
+    keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput,
+        KeyInfo.class);
+
+    assertEquals(keys.get(0).getKeyName(), "test-key8");
+    assertEquals(keys.get(1).getKeyName(), "test-key9");
+
+    // test error conditions
+    err.reset();
+    args = new String[] {"-listKey", url + "/" + volumeName + "/" + bucketName,
+        "-length", "-1"};
+    assertEquals(1, ToolRunner.run(shell, args));
+    assertTrue(err.toString().contains(
+        "the vaule should be a positive number"));
+  }
+
+  private OzoneVolume creatVolume() throws OzoneException {
+    String volumeName = UUID.randomUUID().toString() + "volume";
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+
+    return vol;
+  }
+
+  private OzoneBucket creatBucket() throws OzoneException {
+    OzoneVolume vol = creatVolume();
+    String bucketName = UUID.randomUUID().toString() + "bucket";
+    OzoneBucket bucketInfo = vol.createBucket(bucketName);
+
+    return bucketInfo;
+  }
+
+  /**
+   * Create a temporary file used for putting a key.
+   * @return the created file's path string
+   * @throws Exception
+   */
+  private String createTmpFile() throws Exception {
+    // write a new file that is used for putting a key
+    File tmpFile = new File(baseDir,
+        "/testfile-" + UUID.randomUUID().toString());
+    FileOutputStream randFile = new FileOutputStream(tmpFile);
+    Random r = new Random();
+    for (int x = 0; x < 10; x++) {
+      char c = (char) (r.nextInt(26) + 'a');
+      randFile.write(c);
+    }
+    randFile.close();
+
+    return tmpFile.getAbsolutePath();
+  }
+}
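A note on the out and err buffers these shell assertions read from: they are presumably wired up in the test fixture roughly as in the minimal JUnit 4 sketch below. This sketch is not part of the patch; the class and field names are illustrative assumptions.

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
import org.junit.Before;

public class CapturedStreamsSketch {
  private final PrintStream originalOut = System.out;
  private final PrintStream originalErr = System.err;
  // Buffers the tests read from, mirroring the out/err fields used above.
  protected final ByteArrayOutputStream out = new ByteArrayOutputStream();
  protected final ByteArrayOutputStream err = new ByteArrayOutputStream();

  @Before
  public void captureStreams() {
    // Shell commands print their results to stdout and errors to stderr;
    // redirect both so assertions can inspect the captured text.
    System.setOut(new PrintStream(out));
    System.setErr(new PrintStream(err));
  }

  @After
  public void restoreStreams() {
    System.setOut(originalOut);
    System.setErr(originalErr);
  }
}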
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
new file mode 100644
index 0000000..275fadb
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+
+/**
+ * Test allocate container calls.
+ */
+public class TestAllocateContainer {
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private static XceiverClientManager xceiverClientManager;
+  private static String containerOwner = "OZONE";
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(3).build();
+    cluster.waitForClusterToBeReady();
+    storageContainerLocationClient =
+        cluster.getStorageContainerLocationClient();
+    xceiverClientManager = new XceiverClientManager(conf);
+  }
+
+  @AfterClass
+  public static void shutdown() throws InterruptedException {
+    if(cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanupWithLogger(null, storageContainerLocationClient);
+  }
+
+  @Test
+  public void testAllocate() throws Exception {
+    Pipeline pipeline = storageContainerLocationClient.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(),
+        "container0", containerOwner);
+    Assert.assertNotNull(pipeline);
+    Assert.assertNotNull(pipeline.getLeader());
+
+  }
+
+  @Test
+  public void testAllocateNull() throws Exception {
+    thrown.expect(NullPointerException.class);
+    storageContainerLocationClient.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), null, containerOwner);
+  }
+
+  @Test
+  public void testAllocateDuplicate() throws Exception {
+    String containerName = RandomStringUtils.randomAlphanumeric(10);
+    thrown.expect(IOException.class);
+    thrown.expectMessage("Specified container already exists");
+    storageContainerLocationClient.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName, containerOwner);
+    storageContainerLocationClient.allocateContainer(
+        xceiverClientManager.getType(),
+        xceiverClientManager.getFactor(), containerName, containerOwner);
+  }
+}
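testAllocateDuplicate leans on the JUnit 4 ExpectedException rule, where expectations must be registered before the call that is supposed to throw and nothing after that call executes. Below is a self-contained sketch of the same pattern with a stand-in allocator instead of the Ozone client; it is illustrative only, not code from this patch.

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionSketch {
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  private final Set<String> allocated = new HashSet<>();

  // Stand-in for allocateContainer(): rejects a name that was already used.
  private void allocate(String name) throws IOException {
    if (!allocated.add(name)) {
      throw new IOException("Specified container already exists");
    }
  }

  @Test
  public void duplicateAllocationFails() throws Exception {
    allocate("container-a");              // first allocation succeeds
    thrown.expect(IOException.class);     // declare before the failing call
    thrown.expectMessage("already exists");
    allocate("container-a");              // second allocation throws here
  }
}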
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
new file mode 100644
index 0000000..a5d0eac
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
@@ -0,0 +1,300 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
+import org.apache.hadoop.hdds.scm.container.ContainerMapping;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.ozone.scm.cli.SQLCLI;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.UUID;
+
+import static org.apache.hadoop.ozone.OzoneConsts.BLOCK_DB;
+import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB;
+import static org.apache.hadoop.ozone.OzoneConsts.KB;
+import static org.apache.hadoop.ozone.OzoneConsts.NODEPOOL_DB;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * This class tests the CLI that transforms container databases into SQLite DB files.
+ */
+@RunWith(Parameterized.class)
+public class TestContainerSQLCli {
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(new Object[][] {
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
+        {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
+    });
+  }
+
+  private static String metaStoreType;
+
+  public TestContainerSQLCli(String type) {
+    metaStoreType = type;
+  }
+
+  private static SQLCLI cli;
+
+  private MiniOzoneCluster cluster;
+  private OzoneConfiguration conf;
+  private String datanodeIpAddress;
+
+  private ContainerMapping mapping;
+  private NodeManager nodeManager;
+  private BlockManagerImpl blockManager;
+
+  private Pipeline pipeline1;
+  private Pipeline pipeline2;
+
+  private HashMap<String, String> blockContainerMap;
+
+  private final static long DEFAULT_BLOCK_SIZE = 4 * KB;
+  private static HddsProtos.ReplicationFactor factor;
+  private static HddsProtos.ReplicationType type;
+  private static final String CONTAINER_OWNER = "OZONE";
+
+
+  @Before
+  public void setup() throws Exception {
+    blockContainerMap = new HashMap<>();
+
+    conf = new OzoneConfiguration();
+    conf.setInt(ScmConfigKeys.OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE, 2);
+    conf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+        SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
+    if(conf.getBoolean(ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT)){
+      factor = HddsProtos.ReplicationFactor.THREE;
+      type = HddsProtos.ReplicationType.RATIS;
+    } else {
+      factor = HddsProtos.ReplicationFactor.ONE;
+      type = HddsProtos.ReplicationType.STAND_ALONE;
+    }
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(2).build();
+    cluster.waitForClusterToBeReady();
+    datanodeIpAddress = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getIpAddress();
+    cluster.getKeySpaceManager().stop();
+    cluster.getStorageContainerManager().stop();
+
+    nodeManager = cluster.getStorageContainerManager().getScmNodeManager();
+    mapping = new ContainerMapping(conf, nodeManager, 128);
+    blockManager = new BlockManagerImpl(conf, nodeManager, mapping, 128);
+
+    // blockManager.allocateBlock() will create containers if none are
+    // stored in LevelDB. The number of containers to create is the value of
+    // OZONE_SCM_CONTAINER_PROVISION_BATCH_SIZE, which we set to 2, so the
+    // first allocateBlock() call creates two containers and assigns the
+    // block to a random one of them.
+
+    // loop until both datanodes are up; try for up to about 4 seconds.
+    for (int c = 0; c < 40; c++) {
+      if (nodeManager.getAllNodes().size() == 2) {
+        break;
+      }
+      Thread.sleep(100);
+    }
+    assertEquals(2, nodeManager.getAllNodes().size());
+    AllocatedBlock ab1 = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE, type,
+        factor, CONTAINER_OWNER);
+    pipeline1 = ab1.getPipeline();
+    blockContainerMap.put(ab1.getKey(), pipeline1.getContainerName());
+
+    AllocatedBlock ab2;
+    // we want the two blocks to land on the two provisioned containers, but
+    // blockManager picks a container at random, so keep retrying until the
+    // second block is assigned to the other container. This seems to be the
+    // only way to cover both containers. Note that each retry creates a
+    // block and assigns it to a container, so the size of blockContainerMap
+    // varies from run to run.
+    while (true) {
+      ab2 = blockManager
+          .allocateBlock(DEFAULT_BLOCK_SIZE, type, factor, CONTAINER_OWNER);
+      pipeline2 = ab2.getPipeline();
+      blockContainerMap.put(ab2.getKey(), pipeline2.getContainerName());
+      if (!pipeline1.getContainerName().equals(pipeline2.getContainerName())) {
+        break;
+      }
+    }
+
+    blockManager.close();
+    mapping.close();
+    nodeManager.close();
+
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, metaStoreType);
+    cli = new SQLCLI(conf);
+
+  }
+
+  @After
+  public void shutdown() throws InterruptedException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testConvertBlockDB() throws Exception {
+    String dbOutPath = GenericTestUtils.getTempPath(
+        UUID.randomUUID() + "/out_sql.db");
+    String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
+    String dbPath = dbRootPath + "/" + BLOCK_DB;
+    String[] args = {"-p", dbPath, "-o", dbOutPath};
+
+    cli.run(args);
+
+    Connection conn = connectDB(dbOutPath);
+    String sql = "SELECT * FROM blockContainer";
+    ResultSet rs = executeQuery(conn, sql);
+    while(rs.next()) {
+      String blockKey = rs.getString("blockKey");
+      String containerName = rs.getString("containerName");
+      assertTrue(blockContainerMap.containsKey(blockKey) &&
+          blockContainerMap.remove(blockKey).equals(containerName));
+    }
+    assertEquals(0, blockContainerMap.size());
+    Files.delete(Paths.get(dbOutPath));
+  }
+
+  @Test
+  public void testConvertNodepoolDB() throws Exception {
+    String dbOutPath = GenericTestUtils.getTempPath(
+        UUID.randomUUID() + "/out_sql.db");
+    String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
+    String dbPath = dbRootPath + "/" + NODEPOOL_DB;
+    String[] args = {"-p", dbPath, "-o", dbOutPath};
+
+    cli.run(args);
+
+    // verify the SQLite db
+    HashMap<String, String> expectedPool = new HashMap<>();
+    for (DatanodeDetails dnid : nodeManager.getAllNodes()) {
+      expectedPool.put(dnid.getUuidString(), "DefaultNodePool");
+    }
+    Connection conn = connectDB(dbOutPath);
+    String sql = "SELECT * FROM nodePool";
+    ResultSet rs = executeQuery(conn, sql);
+    while(rs.next()) {
+      String datanodeUUID = rs.getString("datanodeUUID");
+      String poolName = rs.getString("poolName");
+      assertTrue(expectedPool.remove(datanodeUUID).equals(poolName));
+    }
+    assertEquals(0, expectedPool.size());
+
+    Files.delete(Paths.get(dbOutPath));
+  }
+
+  @Test
+  public void testConvertContainerDB() throws Exception {
+    String dbOutPath = GenericTestUtils.getTempPath(
+        UUID.randomUUID() + "/out_sql.db");
+    // TODO: the following will fail due to an empty Datanode list; needs a fix.
+    //String dnUUID = cluster.getDataNodes().get(0).getUuid();
+    String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
+    String dbPath = dbRootPath + "/" + SCM_CONTAINER_DB;
+    String[] args = {"-p", dbPath, "-o", dbOutPath};
+    Connection conn;
+    String sql;
+    ResultSet rs;
+
+    cli.run(args);
+
+    // verify the SQLite db; only check that the container names are as
+    // expected, because other fields such as the datanode UUID are
+    // generated randomly each time
+    conn = connectDB(dbOutPath);
+    sql = "SELECT * FROM containerInfo";
+    rs = executeQuery(conn, sql);
+    ArrayList<String> containerNames = new ArrayList<>();
+    while (rs.next()) {
+      containerNames.add(rs.getString("containerName"));
+      //assertEquals(dnUUID, rs.getString("leaderUUID"));
+    }
+    assertTrue(containerNames.size() == 2 &&
+        containerNames.contains(pipeline1.getContainerName()) &&
+        containerNames.contains(pipeline2.getContainerName()));
+
+    sql = "SELECT * FROM containerMembers";
+    rs = executeQuery(conn, sql);
+    containerNames = new ArrayList<>();
+    while (rs.next()) {
+      containerNames.add(rs.getString("containerName"));
+      //assertEquals(dnUUID, rs.getString("datanodeUUID"));
+    }
+    assertTrue(containerNames.size() == 2 &&
+        containerNames.contains(pipeline1.getContainerName()) &&
+        containerNames.contains(pipeline2.getContainerName()));
+
+    sql = "SELECT * FROM datanodeInfo";
+    rs = executeQuery(conn, sql);
+    int count = 0;
+    while (rs.next()) {
+      assertEquals(datanodeIpAddress, rs.getString("ipAddress"));
+      //assertEquals(dnUUID, rs.getString("datanodeUUID"));
+      count += 1;
+    }
+    // the two containers may or may not be on the same datanode.
+    int expected = pipeline1.getLeader().getUuid().equals(
+        pipeline2.getLeader().getUuid())? 1 : 2;
+    assertEquals(expected, count);
+    Files.delete(Paths.get(dbOutPath));
+  }
+
+  private ResultSet executeQuery(Connection conn, String sql)
+      throws SQLException {
+    Statement stmt = conn.createStatement();
+    return stmt.executeQuery(sql);
+  }
+
+  private Connection connectDB(String dbPath) throws Exception {
+    Class.forName("org.sqlite.JDBC");
+    String connectPath =
+        String.format("jdbc:sqlite:%s", dbPath);
+    return DriverManager.getConnection(connectPath);
+  }
+}
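All three tests above verify the converted database the same way: open the generated SQLite file over JDBC, drain one table, and compare it with the map the test built while writing. The helper below sketches that pattern; the class and method names are invented for illustration, while the JDBC calls and the blockContainer column names mirror the test itself.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;

public final class SqliteTableSketch {
  private SqliteTableSketch() { }

  // Read two columns of a table into a map so the caller can assert equality
  // against the expected map it built while writing the data.
  public static Map<String, String> readTwoColumns(String dbPath, String sql,
      String keyColumn, String valueColumn) throws Exception {
    Class.forName("org.sqlite.JDBC");               // register the SQLite driver
    Map<String, String> rows = new HashMap<>();
    try (Connection conn =
             DriverManager.getConnection("jdbc:sqlite:" + dbPath);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(sql)) {
      while (rs.next()) {
        rows.put(rs.getString(keyColumn), rs.getString(valueColumn));
      }
    }
    return rows;
  }
}

For example, readTwoColumns(dbOutPath, "SELECT * FROM blockContainer", "blockKey", "containerName") should equal the blockContainerMap built in setup().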
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
new file mode 100644
index 0000000..d75b66c
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
+import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.UUID;
+
+/**
+ * Test Container calls.
+ */
+public class TestContainerSmallFile {
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration ozoneConfig;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private static XceiverClientManager xceiverClientManager;
+  private static String containerOwner = "OZONE";
+
+  @BeforeClass
+  public static void init() throws Exception {
+    ozoneConfig = new OzoneConfiguration();
+    ozoneConfig.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
+        SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
+    cluster = MiniOzoneCluster.newBuilder(ozoneConfig).setNumDatanodes(1)
+        .build();
+    cluster.waitForClusterToBeReady();
+    storageContainerLocationClient = cluster
+        .getStorageContainerLocationClient();
+    xceiverClientManager = new XceiverClientManager(ozoneConfig);
+  }
+
+  @AfterClass
+  public static void shutdown() throws InterruptedException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanupWithLogger(null, storageContainerLocationClient);
+  }
+
+  @Test
+  public void testAllocateWrite() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String containerName = "container0";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(
+            xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    XceiverClientSpi client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+
+    ContainerProtocolCalls.writeSmallFile(client, containerName,
+        "key", "data123".getBytes(), traceID);
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, containerName, "key",
+            traceID);
+    String readData = response.getData().getData().toStringUtf8();
+    Assert.assertEquals("data123", readData);
+    xceiverClientManager.releaseClient(client);
+  }
+
+  @Test
+  public void testInvalidKeyRead() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String containerName = "container1";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(
+            xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    XceiverClientSpi client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+
+    thrown.expect(StorageContainerException.class);
+    thrown.expectMessage("Unable to find the key");
+
+    // Try to read a key that was never written to the container
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, containerName, "key",
+            traceID);
+    xceiverClientManager.releaseClient(client);
+  }
+
+  @Test
+  public void testInvalidContainerRead() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String invalidName = "invalidName";
+    String containerName = "container2";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(
+            xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    XceiverClientSpi client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+    ContainerProtocolCalls.writeSmallFile(client, containerName,
+        "key", "data123".getBytes(), traceID);
+
+
+    thrown.expect(StorageContainerException.class);
+    thrown.expectMessage("Unable to find the container");
+
+    // Try to read from a container that does not exist
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, invalidName, "key",
+            traceID);
+    xceiverClientManager.releaseClient(client);
+  }
+}
+
+
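In the two negative tests above, releaseClient(client) comes after the call that the ExpectedException rule aborts on, so it never runs and the client is only reclaimed at cluster shutdown. A hedged, method-level sketch of a variant that releases unconditionally is shown below; it assumes the same static fields and the Ozone calls already used in this file, and the container name is hypothetical.

  @Test
  public void testInvalidKeyReadReleasesClient() throws Exception {
    String traceID = UUID.randomUUID().toString();
    String containerName = "container3";
    Pipeline pipeline = storageContainerLocationClient.allocateContainer(
        xceiverClientManager.getType(),
        HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
    XceiverClientSpi client = xceiverClientManager.acquireClient(pipeline);
    try {
      ContainerProtocolCalls.createContainer(client, traceID);
      thrown.expect(StorageContainerException.class);
      thrown.expectMessage("Unable to find the key");
      ContainerProtocolCalls.readSmallFile(client, containerName, "key",
          traceID);
    } finally {
      xceiverClientManager.releaseClient(client);  // runs even when the read throws
    }
  }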
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
new file mode 100644
index 0000000..fffdbff
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
@@ -0,0 +1,540 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ResultCode;
+import org.apache.hadoop.hdds.scm.cli.SCMCLI;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.OPEN;
+
+import static org.apache.hadoop.hdds.scm.cli.ResultCode.EXECUTION_ERROR;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertFalse;
+/**
+ * This class tests the CLI of SCM.
+ */
+public class TestSCMCli {
+  private static SCMCLI cli;
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+
+  private static StorageContainerManager scm;
+  private static ScmClient containerOperationClient;
+
+  private static ByteArrayOutputStream outContent;
+  private static PrintStream outStream;
+  private static ByteArrayOutputStream errContent;
+  private static PrintStream errStream;
+  private static XceiverClientManager xceiverClientManager;
+  private static String containerOwner = "OZONE";
+
+  @Rule
+  public Timeout globalTimeout = new Timeout(30000);
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(3).build();
+    cluster.waitForClusterToBeReady();
+    xceiverClientManager = new XceiverClientManager(conf);
+    storageContainerLocationClient =
+        cluster.getStorageContainerLocationClient();
+    containerOperationClient = new ContainerOperationClient(
+        storageContainerLocationClient, new XceiverClientManager(conf));
+    outContent = new ByteArrayOutputStream();
+    outStream = new PrintStream(outContent);
+    errContent = new ByteArrayOutputStream();
+    errStream = new PrintStream(errContent);
+    cli = new SCMCLI(containerOperationClient, outStream, errStream);
+    scm = cluster.getStorageContainerManager();
+  }
+
+  private int runCommandAndGetOutput(String[] cmd,
+      ByteArrayOutputStream out,
+      ByteArrayOutputStream err) throws Exception {
+    PrintStream cmdOutStream = System.out;
+    PrintStream cmdErrStream = System.err;
+    if(out != null) {
+      cmdOutStream = new PrintStream(out);
+    }
+    if (err != null) {
+      cmdErrStream = new PrintStream(err);
+    }
+    ScmClient client = new ContainerOperationClient(
+        storageContainerLocationClient, new XceiverClientManager(conf));
+    SCMCLI scmCLI = new SCMCLI(client, cmdOutStream, cmdErrStream);
+    return scmCLI.run(cmd);
+  }
+
+  @AfterClass
+  public static void shutdown() throws InterruptedException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanupWithLogger(null, storageContainerLocationClient);
+  }
+
+  @Test
+  public void testCreateContainer() throws Exception {
+    String containerName =  "containerTestCreate";
+    try {
+      scm.getContainer(containerName);
+      fail("should not be able to get the container");
+    } catch (IOException ioe) {
+      assertTrue(ioe.getMessage().contains(
+          "Specified key does not exist. key : " + containerName));
+    }
+    String[] args = {"-container", "-create", "-c", containerName};
+    assertEquals(ResultCode.SUCCESS, cli.run(args));
+    Pipeline container = scm.getContainer(containerName);
+    assertNotNull(container);
+    assertEquals(containerName, container.getContainerName());
+  }
+
+  private boolean containerExist(String containerName) {
+    try {
+      Pipeline scmPipeline = scm.getContainer(containerName);
+      return scmPipeline != null
+          && containerName.equals(scmPipeline.getContainerName());
+    } catch (IOException e) {
+      return false;
+    }
+  }
+
+  @Test
+  public void testDeleteContainer() throws Exception {
+    String containerName;
+    ContainerData containerData;
+    Pipeline pipeline;
+    String[] delCmd;
+    ByteArrayOutputStream testErr;
+    int exitCode;
+
+    // ****************************************
+    // 1. Test to delete a non-empty container.
+    // ****************************************
+    // Create a non-empty container
+    containerName = "non-empty-container";
+    pipeline = containerOperationClient
+        .createContainer(xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+
+    ContainerData cdata = ContainerData
+        .getFromProtBuf(containerOperationClient.readContainer(pipeline), conf);
+    KeyUtils.getDB(cdata, conf).put(containerName.getBytes(),
+        "someKey".getBytes());
+    Assert.assertTrue(containerExist(containerName));
+
+    // Gracefully deleting a container should fail because it is open.
+    delCmd = new String[] {"-container", "-delete", "-c", containerName};
+    testErr = new ByteArrayOutputStream();
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    exitCode = runCommandAndGetOutput(delCmd, out, testErr);
+    assertEquals(EXECUTION_ERROR, exitCode);
+    assertTrue(testErr.toString()
+        .contains("Deleting an open container is not allowed."));
+    Assert.assertTrue(containerExist(containerName));
+
+    // Close the container
+    containerOperationClient.closeContainer(pipeline);
+
+    // Gracefully deleting a container should fail because it is not empty.
+    testErr = new ByteArrayOutputStream();
+    int exitCode2 = runCommandAndGetOutput(delCmd, out, testErr);
+    assertEquals(EXECUTION_ERROR, exitCode2);
+    assertTrue(testErr.toString()
+        .contains("Container cannot be deleted because it is not empty."));
+    Assert.assertTrue(containerExist(containerName));
+
+    // Try the delete again, this time with force.
+    delCmd = new String[] {"-container", "-delete", "-c", containerName, "-f"};
+    exitCode = runCommandAndGetOutput(delCmd, out, null);
+    assertEquals("Expected success, found:", ResultCode.SUCCESS, exitCode);
+    assertFalse(containerExist(containerName));
+
+    // ****************************************
+    // 2. Test to delete an empty container.
+    // ****************************************
+    // Create an empty container
+    containerName = "empty-container";
+    pipeline = containerOperationClient
+        .createContainer(xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    containerOperationClient.closeContainer(pipeline);
+    Assert.assertTrue(containerExist(containerName));
+
+    // Successfully delete an empty container.
+    delCmd = new String[] {"-container", "-delete", "-c", containerName};
+    exitCode = runCommandAndGetOutput(delCmd, out, null);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    assertFalse(containerExist(containerName));
+
+    // After the container is deleted,
+    // a container with the same name can be recreated.
+    containerOperationClient.createContainer(xceiverClientManager.getType(),
+        HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    Assert.assertTrue(containerExist(containerName));
+
+    // ****************************************
+    // 3. Test to delete a non-existent container.
+    // ****************************************
+    containerName = "non-exist-container";
+    delCmd = new String[] {"-container", "-delete", "-c", containerName};
+    testErr = new ByteArrayOutputStream();
+    exitCode = runCommandAndGetOutput(delCmd, out, testErr);
+    assertEquals(EXECUTION_ERROR, exitCode);
+    assertTrue(testErr.toString()
+        .contains("Specified key does not exist."));
+  }
+
+  @Test
+  public void testInfoContainer() throws Exception {
+    // The cluster has one Datanode server.
+    DatanodeDetails datanodeDetails = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails();
+    String formatStr =
+        "Container Name: %s\n" +
+        "Container State: %s\n" +
+        "Container DB Path: %s\n" +
+        "Container Path: %s\n" +
+        "Container Metadata: {%s}\n" +
+        "LeaderID: %s\n" +
+        "Datanodes: [%s]\n";
+
+    String formatStrWithHash =
+        "Container Name: %s\n" +
+        "Container State: %s\n" +
+        "Container Hash: %s\n" +
+        "Container DB Path: %s\n" +
+        "Container Path: %s\n" +
+        "Container Metadata: {%s}\n" +
+        "LeaderID: %s\n" +
+        "Datanodes: [%s]\n";
+
+    // Test a non-existent container
+    String cname = "nonExistContainer";
+    String[] info = {"-container", "-info", cname};
+    int exitCode = runCommandAndGetOutput(info, null, null);
+    assertEquals("Expected Execution Error, Did not find that.",
+        EXECUTION_ERROR, exitCode);
+
+    // Create an empty container.
+    cname = "ContainerTestInfo1";
+    Pipeline pipeline = containerOperationClient
+        .createContainer(xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, cname, containerOwner);
+    ContainerData data = ContainerData
+        .getFromProtBuf(containerOperationClient.readContainer(pipeline), conf);
+
+    info = new String[]{"-container", "-info", "-c", cname};
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    exitCode = runCommandAndGetOutput(info, out, null);
+    assertEquals("Expected Success, did not find it.", ResultCode.SUCCESS,
+            exitCode);
+
+    String openStatus = data.isOpen() ? "OPEN" : "CLOSED";
+    String expected = String.format(formatStr, cname, openStatus,
+        data.getDBPath(), data.getContainerPath(), "",
+        datanodeDetails.getHostName(), datanodeDetails.getHostName());
+    assertEquals(expected, out.toString());
+
+    out.reset();
+
+    // Create a non-empty container
+    cname = "ContainerTestInfo2";
+    pipeline = containerOperationClient
+        .createContainer(xceiverClientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, cname, containerOwner);
+    data = ContainerData
+        .getFromProtBuf(containerOperationClient.readContainer(pipeline), conf);
+    KeyUtils.getDB(data, conf).put(cname.getBytes(), "someKey".getBytes());
+
+    info = new String[]{"-container", "-info", "-c", cname};
+    exitCode = runCommandAndGetOutput(info, out, null);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+
+    openStatus = data.isOpen() ? "OPEN" : "CLOSED";
+    expected = String.format(formatStr, cname, openStatus,
+        data.getDBPath(), data.getContainerPath(), "",
+        datanodeDetails.getHostName(), datanodeDetails.getHostName());
+    assertEquals(expected, out.toString());
+
+    out.reset();
+
+
+    // Close last container and test info again.
+    containerOperationClient.closeContainer(pipeline);
+
+    info = new String[] {"-container", "-info", "-c", cname};
+    exitCode = runCommandAndGetOutput(info, out, null);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    data = ContainerData
+        .getFromProtBuf(containerOperationClient.readContainer(pipeline), conf);
+
+    openStatus = data.isOpen() ? "OPEN" : "CLOSED";
+    expected = String.format(formatStrWithHash, cname, openStatus,
+        data.getHash(), data.getDBPath(), data.getContainerPath(),
+        "", datanodeDetails.getHostName(), datanodeDetails.getHostName());
+    assertEquals(expected, out.toString());
+  }
+
+  @Test
+  public void testNonExistCommand() throws Exception {
+    PrintStream init = System.out;
+    ByteArrayOutputStream testContent = new ByteArrayOutputStream();
+    PrintStream testPrintOut = new PrintStream(testContent);
+    System.setOut(testPrintOut);
+    String[] args = {"-nothingUseful"};
+    assertEquals(ResultCode.UNRECOGNIZED_CMD, cli.run(args));
+    assertTrue(errContent.toString()
+        .contains("Unrecognized options:[-nothingUseful]"));
+    String expectedOut =
+        "usage: hdfs scmcli <commands> [<options>]\n" +
+        "where <commands> can be one of the following\n" +
+        " -container   Container related options\n";
+    assertEquals(expectedOut, testContent.toString());
+    System.setOut(init);
+  }
+
+  @Test
+  public void testListContainerCommand() throws Exception {
+    // Create 20 containers for testing.
+    String prefix = "ContainerForTesting";
+    for (int index = 0; index < 20; index++) {
+      String containerName = String.format("%s%02d", prefix, index);
+      containerOperationClient.createContainer(xceiverClientManager.getType(),
+          HddsProtos.ReplicationFactor.ONE, containerName, containerOwner);
+    }
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    ByteArrayOutputStream err = new ByteArrayOutputStream();
+
+    // Test without -start, -prefix and -count
+    String[] args = new String[] {"-container", "-list"};
+    int exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(EXECUTION_ERROR, exitCode);
+    assertTrue(err.toString()
+        .contains("Expecting container count"));
+
+    out.reset();
+    err.reset();
+
+    // Test with -start and -count, the value of -count is negative.
+    args = new String[] {"-container", "-list",
+        "-start", prefix + 0, "-count", "-1"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(EXECUTION_ERROR, exitCode);
+    assertTrue(err.toString()
+        .contains("-count should not be negative"));
+
+    out.reset();
+    err.reset();
+
+    String startName = String.format("%s%02d", prefix, 0);
+
+    // Test with -start and -count.
+    args = new String[] {"-container", "-list", "-start",
+        startName, "-count", "10"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    for (int index = 0; index < 10; index++) {
+      String containerName = String.format("%s%02d", prefix, index);
+      assertTrue(out.toString().contains(containerName));
+    }
+
+    out.reset();
+    err.reset();
+
+    // Test with -start, -prefix and -count.
+    startName = String.format("%s%02d", prefix, 0);
+    String prefixName = String.format("%s0", prefix);
+    args = new String[] {"-container", "-list", "-start",
+        startName, "-prefix", prefixName, "-count", "20"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    for (int index = 0; index < 10; index++) {
+      String containerName = String.format("%s%02d", prefix, index);
+      assertTrue(out.toString().contains(containerName));
+    }
+
+    out.reset();
+    err.reset();
+
+    startName = String.format("%s%02d", prefix, 0);
+    prefixName = String.format("%s0", prefix);
+    args = new String[] {"-container", "-list", "-start",
+        startName, "-prefix", prefixName, "-count", "4"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    for (int index = 0; index < 4; index++) {
+      String containerName = String.format("%s%02d", prefix, index);
+      assertTrue(out.toString().contains(containerName));
+    }
+
+    out.reset();
+    err.reset();
+
+    prefixName = String.format("%s0", prefix);
+    args = new String[] {"-container", "-list",
+        "-prefix", prefixName, "-count", "6"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    for (int index = 0; index < 6; index++) {
+      String containerName = String.format("%s%02d", prefix, index);
+      assertTrue(out.toString().contains(containerName));
+    }
+
+    out.reset();
+    err.reset();
+
+    // Test with a -prefix that matches no container name; expect empty output.
+    prefixName = String.format("%s%02d", prefix, 20);
+    args = new String[] {"-container", "-list", "-start",
+        startName, "-prefix", prefixName, "-count", "10"};
+    exitCode = runCommandAndGetOutput(args, out, err);
+    assertEquals(ResultCode.SUCCESS, exitCode);
+    assertTrue(out.toString().isEmpty());
+  }
+
+  @Test
+  public void testCloseContainer() throws Exception {
+    String containerName =  "containerTestClose";
+    String[] args = {"-container", "-create", "-c", containerName};
+    assertEquals(ResultCode.SUCCESS, cli.run(args));
+    Pipeline container = scm.getContainer(containerName);
+    assertNotNull(container);
+    assertEquals(containerName, container.getContainerName());
+
+    ContainerInfo containerInfo = scm.getContainerInfo(containerName);
+    assertEquals(OPEN, containerInfo.getState());
+
+    String[] args1 = {"-container", "-close", "-c", containerName};
+    assertEquals(ResultCode.SUCCESS, cli.run(args1));
+
+    containerInfo = scm.getContainerInfo(containerName);
+    assertEquals(CLOSED, containerInfo.getState());
+
+    // closing this container again will trigger an error.
+    assertEquals(EXECUTION_ERROR, cli.run(args1));
+  }
+
+  @Test
+  public void testHelp() throws Exception {
+    // TODO: these assertions may break whenever a new help entry is added;
+    // we may want to disable this test at some point. For now, it mainly
+    // showcases the format of the help output.
+    PrintStream init = System.out;
+    ByteArrayOutputStream testContent = new ByteArrayOutputStream();
+    PrintStream testPrintOut = new PrintStream(testContent);
+    System.setOut(testPrintOut);
+    String[] args = {"-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args));
+    String expected =
+        "usage: hdfs scmcli <commands> [<options>]\n" +
+        "where <commands> can be one of the following\n" +
+        " -container   Container related options\n";
+    assertEquals(expected, testContent.toString());
+    testContent.reset();
+
+    String[] args1 = {"-container", "-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args1));
+    String expected1 =
+        "usage: hdfs scm -container <commands> <options>\n" +
+        "where <commands> can be one of the following\n" +
+        " -close    Close container\n" +
+        " -create   Create container\n" +
+        " -delete   Delete container\n" +
+        " -info     Info container\n" +
+        " -list     List container\n";
+
+    assertEquals(expected1, testContent.toString());
+    testContent.reset();
+
+    String[] args2 = {"-container", "-create", "-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args2));
+    String expected2 =
+        "usage: hdfs scm -container -create <option>\n" +
+        "where <option> is\n" +
+        " -c <arg>   Specify container name\n";
+    assertEquals(expected2, testContent.toString());
+    testContent.reset();
+
+    String[] args3 = {"-container", "-delete", "-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args3));
+    String expected3 =
+        "usage: hdfs scm -container -delete <option>\n" +
+        "where <option> is\n" +
+        " -c <arg>   Specify container name\n" +
+        " -f         forcibly delete a container\n";
+    assertEquals(expected3, testContent.toString());
+    testContent.reset();
+
+    String[] args4 = {"-container", "-info", "-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args4));
+    String expected4 =
+        "usage: hdfs scm -container -info <option>\n" +
+        "where <option> is\n" +
+        " -c <arg>   Specify container name\n";
+    assertEquals(expected4, testContent.toString());
+    testContent.reset();
+
+    String[] args5 = {"-container", "-list", "-help"};
+    assertEquals(ResultCode.SUCCESS, cli.run(args5));
+    String expected5 =
+        "usage: hdfs scm -container -list <option>\n" +
+            "where <option> can be the following\n" +
+            " -count <arg>    Specify count number, required\n" +
+            " -prefix <arg>   Specify prefix container name\n" +
+            " -start <arg>    Specify start container name\n";
+    assertEquals(expected5, testContent.toString());
+    testContent.reset();
+
+    System.setOut(init);
+  }
+}
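As the TODO in testHelp says, exact-match assertions on the full help text break whenever an entry is added. Below is a sketch of a less brittle variant that asserts only on stable substrings; it assumes the cli field, ResultCode, and the JUnit and java.io imports already present in this class.

  @Test
  public void testHelpContainsCoreEntries() throws Exception {
    PrintStream original = System.out;
    ByteArrayOutputStream testContent = new ByteArrayOutputStream();
    System.setOut(new PrintStream(testContent));
    try {
      assertEquals(ResultCode.SUCCESS, cli.run(new String[] {"-help"}));
    } finally {
      System.setOut(original);
    }
    String help = testContent.toString();
    // Only check for the pieces that should stay stable as new entries appear.
    assertTrue(help.contains("usage: hdfs scmcli"));
    assertTrue(help.contains("-container"));
  }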
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMXBean.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMXBean.java
new file mode 100644
index 0000000..27a9404
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMXBean.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
+import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.Map;
+import java.util.Iterator;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeoutException;
+
+import javax.management.openmbean.CompositeData;
+import javax.management.openmbean.TabularData;
+
+/**
+ * This class tests the JMX management interface for SCM (Storage Container
+ * Manager) information.
+ */
+public class TestSCMMXBean {
+
+  public static final Log LOG = LogFactory.getLog(TestSCMMXBean.class);
+  private static int numOfDatanodes = 1;
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+  private static StorageContainerManager scm;
+  private static MBeanServer mbs;
+
+  @BeforeClass
+  public static void init() throws IOException, TimeoutException,
+      InterruptedException {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(numOfDatanodes)
+        .build();
+    cluster.waitForClusterToBeReady();
+    scm = cluster.getStorageContainerManager();
+    mbs = ManagementFactory.getPlatformMBeanServer();
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testSCMMXBean() throws Exception {
+    ObjectName bean = new ObjectName(
+        "Hadoop:service=StorageContainerManager,"
+            + "name=StorageContainerManagerInfo,"
+            + "component=ServerRuntime");
+
+    String dnRpcPort = (String)mbs.getAttribute(bean,
+        "DatanodeRpcPort");
+    assertEquals(scm.getDatanodeRpcPort(), dnRpcPort);
+
+
+    String clientRpcPort = (String)mbs.getAttribute(bean,
+        "ClientRpcPort");
+    assertEquals(scm.getClientRpcPort(), clientRpcPort);
+
+    ConcurrentMap<String, ContainerStat> map = scm.getContainerReportCache();
+    ContainerStat stat = new ContainerStat(1, 2, 3, 4, 5, 6, 7);
+    map.put("nodeID", stat);
+    TabularData data = (TabularData) mbs.getAttribute(
+        bean, "ContainerReport");
+
+    // verify report info
+    assertEquals(1, data.values().size());
+    for (Object obj : data.values()) {
+      assertTrue(obj instanceof CompositeData);
+      CompositeData d = (CompositeData) obj;
+      Iterator<?> it = d.values().iterator();
+      String key = it.next().toString();
+      String value = it.next().toString();
+      assertEquals("nodeID", key);
+      assertEquals(stat.toJsonString(), value);
+    }
+  }
+
+  @Test
+  public void testSCMNodeManagerMXBean() throws Exception {
+    final NodeManager scmNm = scm.getScmNodeManager();
+    ObjectName bean = new ObjectName(
+        "Hadoop:service=SCMNodeManager,name=SCMNodeManagerInfo");
+
+    Integer minChillNodes = (Integer)mbs.getAttribute(bean,
+        "MinimumChillModeNodes");
+    assertEquals(scmNm.getMinimumChillModeNodes(),
+        minChillNodes.intValue());
+
+    boolean isOutOfChillMode = (boolean)mbs.getAttribute(bean,
+        "OutOfChillMode");
+    assertEquals(scmNm.isOutOfChillMode(), isOutOfChillMode);
+
+    String chillStatus = (String)mbs.getAttribute(bean,
+        "ChillModeStatus");
+    assertEquals(scmNm.getChillModeStatus(), chillStatus);
+
+    TabularData nodeCountObj = (TabularData)mbs.getAttribute(bean,
+        "NodeCount");
+    verifyEquals(nodeCountObj, scm.getScmNodeManager().getNodeCount());
+  }
+
+  /**
+   * An internal function used to compare a TabularData returned
+   * by JMX with the expected data in a Map.
+   */
+  private void verifyEquals(TabularData data1,
+      Map<String, Integer> data2) {
+    if (data1 == null || data2 == null) {
+      fail("Data should not be null.");
+    }
+    for (Object obj : data1.values()) {
+      // Each TabularData is a set of CompositeData
+      assertTrue(obj instanceof CompositeData);
+      CompositeData cds = (CompositeData) obj;
+      assertEquals(2, cds.values().size());
+      Iterator<?> it = cds.values().iterator();
+      String key = it.next().toString();
+      String value = it.next().toString();
+      int num = Integer.parseInt(value);
+      assertTrue(data2.containsKey(key));
+      assertEquals(data2.get(key).intValue(), num);
+    }
+  }
+}
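Both tests above follow the same JMX access pattern: build an ObjectName, look the bean up on the platform MBeanServer, and read one attribute at a time. The self-contained sketch below exercises that pattern against the JVM's built-in Runtime bean so it runs anywhere; the SCM beans registered by the cluster are read the same way, just with the Hadoop object names shown above.

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public final class JmxAttributeSketch {
  public static void main(String[] args) throws Exception {
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    // The JVM always registers this bean; SCM beans are addressed the same
    // way, e.g. "Hadoop:service=StorageContainerManager,name=...".
    ObjectName bean = new ObjectName("java.lang:type=Runtime");
    Object vmName = mbs.getAttribute(bean, "VmName");
    System.out.println("VmName = " + vmName);
  }
}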
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
new file mode 100644
index 0000000..c28f68f
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
@@ -0,0 +1,258 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getLongGauge;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.junit.Assert.assertEquals;
+
+import java.util.UUID;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.StorageContainerManager;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerReport;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
+import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMMetrics;
+import org.apache.hadoop.hdds.scm.node.SCMNodeManager;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+/**
+ * This class tests the metrics of Storage Container Manager.
+ */
+public class TestSCMMetrics {
+  /**
+   * Set the timeout for each test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(90000);
+
+  private static MiniOzoneCluster cluster = null;
+
+  @Test
+  public void testContainerMetrics() throws Exception {
+    int nodeCount = 2;
+    int numReport = 2;
+    long size = OzoneConsts.GB * 5;
+    long used = OzoneConsts.GB * 2;
+    long readBytes = OzoneConsts.GB * 1;
+    long writeBytes = OzoneConsts.GB * 2;
+    int keyCount = 1000;
+    int readCount = 100;
+    int writeCount = 50;
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    try {
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setNumDatanodes(nodeCount).build();
+      cluster.waitForClusterToBeReady();
+
+      ContainerStat stat = new ContainerStat(size, used, keyCount, readBytes,
+          writeBytes, readCount, writeCount);
+      StorageContainerManager scmManager = cluster.getStorageContainerManager();
+
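+      // Build a single report request that aggregates numReport identical
+      // container stats from one datanode.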
+      ContainerReportsRequestProto request = createContainerReport(numReport,
+          stat, null);
+      String fstDatanodeUuid = request.getDatanodeDetails().getUuid();
+      scmManager.sendContainerReport(request);
+
+      // verify container stat metrics
+      MetricsRecordBuilder scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
+      assertEquals(size * numReport,
+          getLongGauge("LastContainerReportSize", scmMetrics));
+      assertEquals(used * numReport,
+          getLongGauge("LastContainerReportUsed", scmMetrics));
+      assertEquals(readBytes * numReport,
+          getLongGauge("LastContainerReportReadBytes", scmMetrics));
+      assertEquals(writeBytes * numReport,
+          getLongGauge("LastContainerReportWriteBytes", scmMetrics));
+
+      assertEquals(keyCount * numReport,
+          getLongGauge("LastContainerReportKeyCount", scmMetrics));
+      assertEquals(readCount * numReport,
+          getLongGauge("LastContainerReportReadCount", scmMetrics));
+      assertEquals(writeCount * numReport,
+          getLongGauge("LastContainerReportWriteCount", scmMetrics));
+
+      // add one new report
+      request = createContainerReport(1, stat, null);
+      String sndDatanodeUuid = request.getDatanodeDetails().getUuid();
+      scmManager.sendContainerReport(request);
+
+      scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
+      assertEquals(size * (numReport + 1),
+          getLongCounter("ContainerReportSize", scmMetrics));
+      assertEquals(used * (numReport + 1),
+          getLongCounter("ContainerReportUsed", scmMetrics));
+      assertEquals(readBytes * (numReport + 1),
+          getLongCounter("ContainerReportReadBytes", scmMetrics));
+      assertEquals(writeBytes * (numReport + 1),
+          getLongCounter("ContainerReportWriteBytes", scmMetrics));
+
+      assertEquals(keyCount * (numReport + 1),
+          getLongCounter("ContainerReportKeyCount", scmMetrics));
+      assertEquals(readCount * (numReport + 1),
+          getLongCounter("ContainerReportReadCount", scmMetrics));
+      assertEquals(writeCount * (numReport + 1),
+          getLongCounter("ContainerReportWriteCount", scmMetrics));
+
+      // Re-send reports with different values to validate
+      // the aggregation.
+      stat = new ContainerStat(100, 50, 3, 50, 60, 5, 6);
+      scmManager.sendContainerReport(createContainerReport(1, stat,
+          fstDatanodeUuid));
+
+      stat = new ContainerStat(1, 1, 1, 1, 1, 1, 1);
+      scmManager.sendContainerReport(createContainerReport(1, stat,
+          sndDatanodeUuid));
+
+      // the global container metrics should now reflect the re-sent reports
+      scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
+      assertEquals(101, getLongCounter("ContainerReportSize", scmMetrics));
+      assertEquals(51, getLongCounter("ContainerReportUsed", scmMetrics));
+      assertEquals(51, getLongCounter("ContainerReportReadBytes", scmMetrics));
+      assertEquals(61, getLongCounter("ContainerReportWriteBytes", scmMetrics));
+
+      assertEquals(4, getLongCounter("ContainerReportKeyCount", scmMetrics));
+      assertEquals(6, getLongCounter("ContainerReportReadCount", scmMetrics));
+      assertEquals(7, getLongCounter("ContainerReportWriteCount", scmMetrics));
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  @Test
+  public void testStaleNodeContainerReport() throws Exception {
+    int nodeCount = 2;
+    int numReport = 2;
+    long size = OzoneConsts.GB * 5;
+    long used = OzoneConsts.GB * 2;
+    long readBytes = OzoneConsts.GB * 1;
+    long writeBytes = OzoneConsts.GB * 2;
+    int keyCount = 1000;
+    int readCount = 100;
+    int writeCount = 50;
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    try {
+      cluster = MiniOzoneCluster.newBuilder(conf)
+          .setNumDatanodes(nodeCount).build();
+      cluster.waitForClusterToBeReady();
+
+      ContainerStat stat = new ContainerStat(size, used, keyCount, readBytes,
+          writeBytes, readCount, writeCount);
+      StorageContainerManager scmManager = cluster.getStorageContainerManager();
+
+      String datanodeUuid = cluster.getHddsDatanodes().get(0)
+          .getDatanodeDetails().getUuidString();
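+      // Use a real cluster datanode's UUID so the report is tied to a node
+      // that will later be marked stale.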
+      ContainerReportsRequestProto request = createContainerReport(numReport,
+          stat, datanodeUuid);
+      scmManager.sendContainerReport(request);
+
+      MetricsRecordBuilder scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
+      assertEquals(size * numReport,
+          getLongCounter("ContainerReportSize", scmMetrics));
+      assertEquals(used * numReport,
+          getLongCounter("ContainerReportUsed", scmMetrics));
+      assertEquals(readBytes * numReport,
+          getLongCounter("ContainerReportReadBytes", scmMetrics));
+      assertEquals(writeBytes * numReport,
+          getLongCounter("ContainerReportWriteBytes", scmMetrics));
+
+      assertEquals(keyCount * numReport,
+          getLongCounter("ContainerReportKeyCount", scmMetrics));
+      assertEquals(readCount * numReport,
+          getLongCounter("ContainerReportReadCount", scmMetrics));
+      assertEquals(writeCount * numReport,
+          getLongCounter("ContainerReportWriteCount", scmMetrics));
+
+      // reduce the stale-node interval so the node moves from healthy to stale
+      SCMNodeManager nodeManager = (SCMNodeManager) cluster
+          .getStorageContainerManager().getScmNodeManager();
+      nodeManager.setStaleNodeIntervalMs(100);
+
+      // verify that the container report metrics are reset once the node becomes stale
+      GenericTestUtils.waitFor(() -> {
+        MetricsRecordBuilder metrics = getMetrics(SCMMetrics.SOURCE_NAME);
+        return 0 == getLongCounter("ContainerReportSize", metrics)
+            && 0 == getLongCounter("ContainerReportUsed", metrics)
+            && 0 == getLongCounter("ContainerReportReadBytes", metrics)
+            && 0 == getLongCounter("ContainerReportWriteBytes", metrics)
+            && 0 == getLongCounter("ContainerReportKeyCount", metrics)
+            && 0 == getLongCounter("ContainerReportReadCount", metrics)
+            && 0 == getLongCounter("ContainerReportWriteCount", metrics);
+      }, 1000, 60000);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  private ContainerReportsRequestProto createContainerReport(int numReport,
+      ContainerStat stat, String datanodeUuid) {
+    StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto.Builder
+        reportsBuilder = StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.newBuilder();
+
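+    // Build numReport simulated container reports that all share the same stats.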
+    for (int i = 0; i < numReport; i++) {
+      ContainerReport report = new ContainerReport(
+          UUID.randomUUID().toString(), DigestUtils.sha256Hex("Simulated"));
+      report.setSize(stat.getSize().get());
+      report.setBytesUsed(stat.getUsed().get());
+      report.setReadCount(stat.getReadCount().get());
+      report.setReadBytes(stat.getReadBytes().get());
+      report.setKeyCount(stat.getKeyCount().get());
+      report.setWriteCount(stat.getWriteCount().get());
+      report.setWriteBytes(stat.getWriteBytes().get());
+      reportsBuilder.addReports(report.getProtoBufMessage());
+    }
+
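+    // Reuse the given datanode UUID if provided; otherwise generate random
+    // datanode details.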
+    DatanodeDetails datanodeDetails;
+    if (datanodeUuid == null) {
+      datanodeDetails = TestUtils.getDatanodeDetails();
+    } else {
+      datanodeDetails = DatanodeDetails.newBuilder()
+          .setUuid(datanodeUuid)
+          .setIpAddress("127.0.0.1")
+          .setHostName("localhost")
+          .setContainerPort(0)
+          .setRatisPort(0)
+          .setOzoneRestPort(0)
+          .build();
+    }
+
+    reportsBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage());
+    reportsBuilder.setType(StorageContainerDatanodeProtocolProtos
+        .ContainerReportsRequestProto.reportType.fullReport);
+    return reportsBuilder.build();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
new file mode 100644
index 0000000..85403a2
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
@@ -0,0 +1,198 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import com.google.common.cache.Cache;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.junit.Assert;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+
+import static org.apache.hadoop.hdds.scm
+    .ScmConfigKeys.SCM_CONTAINER_CLIENT_MAX_SIZE_KEY;
+
+/**
+ * Test for XceiverClientManager caching and eviction.
+ */
+public class TestXceiverClientManager {
+  private static OzoneConfiguration config;
+  private static MiniOzoneCluster cluster;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private static String containerOwner = "OZONE";
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @BeforeClass
+  public static void init() throws Exception {
+    config = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(config)
+        .setNumDatanodes(3)
+        .build();
+    cluster.waitForClusterToBeReady();
+    storageContainerLocationClient = cluster
+        .getStorageContainerLocationClient();
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanupWithLogger(null, storageContainerLocationClient);
+  }
+
+  @Test
+  public void testCaching() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    XceiverClientManager clientManager = new XceiverClientManager(conf);
+
+    String containerName1 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline1 = storageContainerLocationClient
+        .allocateContainer(clientManager.getType(), clientManager.getFactor(),
+            containerName1, containerOwner);
+    XceiverClientSpi client1 = clientManager.acquireClient(pipeline1);
+    Assert.assertEquals(1, client1.getRefcount());
+    Assert.assertEquals(containerName1,
+        client1.getPipeline().getContainerName());
+
+    String containerName2 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline2 = storageContainerLocationClient
+        .allocateContainer(clientManager.getType(), clientManager.getFactor(),
+            containerName2, containerOwner);
+    XceiverClientSpi client2 = clientManager.acquireClient(pipeline2);
+    Assert.assertEquals(1, client2.getRefcount());
+    Assert.assertEquals(containerName2,
+        client2.getPipeline().getContainerName());
+
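+    // Acquiring the same pipeline again must return the cached client and
+    // bump its refcount.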
+    XceiverClientSpi client3 = clientManager.acquireClient(pipeline1);
+    Assert.assertEquals(2, client3.getRefcount());
+    Assert.assertEquals(2, client1.getRefcount());
+    Assert.assertEquals(containerName1,
+        client3.getPipeline().getContainerName());
+    Assert.assertEquals(client1, client3);
+    clientManager.releaseClient(client1);
+    clientManager.releaseClient(client2);
+    clientManager.releaseClient(client3);
+  }
+
+  @Test
+  public void testFreeByReference() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.setInt(SCM_CONTAINER_CLIENT_MAX_SIZE_KEY, 1);
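+    // A cache capacity of 1 means acquiring a second client evicts the first.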
+    XceiverClientManager clientManager = new XceiverClientManager(conf);
+    Cache<String, XceiverClientSpi> cache =
+        clientManager.getClientCache();
+
+    String containerName1 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline1 =
+        storageContainerLocationClient.allocateContainer(
+            clientManager.getType(), HddsProtos.ReplicationFactor.ONE,
+            containerName1, containerOwner);
+    XceiverClientSpi client1 = clientManager.acquireClient(pipeline1);
+    Assert.assertEquals(1, client1.getRefcount());
+    Assert.assertEquals(containerName1,
+        client1.getPipeline().getContainerName());
+
+    String containerName2 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline2 =
+        storageContainerLocationClient.allocateContainer(
+            clientManager.getType(),
+            HddsProtos.ReplicationFactor.ONE, containerName2, containerOwner);
+    XceiverClientSpi client2 = clientManager.acquireClient(pipeline2);
+    Assert.assertEquals(1, client2.getRefcount());
+    Assert.assertEquals(containerName2,
+        client2.getPipeline().getContainerName());
+    Assert.assertNotEquals(client1, client2);
+
+    // The least recently used container (i.e. containerName1) is evicted.
+    XceiverClientSpi nonExistent1 = cache.getIfPresent(containerName1);
+    Assert.assertEquals(null, nonExistent1);
+    // However, the container call should still succeed because the client holds a refcount.
+    String traceID1 = "trace" + RandomStringUtils.randomNumeric(4);
+    ContainerProtocolCalls.createContainer(client1, traceID1);
+
+    // After releasing the client, this connection should be closed
+    // and any container operations should fail
+    clientManager.releaseClient(client1);
+    exception.expect(IOException.class);
+    exception.expectMessage("This channel is not connected.");
+    ContainerProtocolCalls.createContainer(client1, traceID1);
+    clientManager.releaseClient(client2);
+  }
+
+  @Test
+  public void testFreeByEviction() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.setInt(SCM_CONTAINER_CLIENT_MAX_SIZE_KEY, 1);
+    XceiverClientManager clientManager = new XceiverClientManager(conf);
+    Cache<String, XceiverClientSpi> cache =
+        clientManager.getClientCache();
+
+    String containerName1 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline1 =
+        storageContainerLocationClient.allocateContainer(
+            clientManager.getType(),
+            clientManager.getFactor(), containerName1, containerOwner);
+    XceiverClientSpi client1 = clientManager.acquireClient(pipeline1);
+    Assert.assertEquals(1, client1.getRefcount());
+    Assert.assertEquals(containerName1,
+        client1.getPipeline().getContainerName());
+
+    clientManager.releaseClient(client1);
+    Assert.assertEquals(0, client1.getRefcount());
+
+    String containerName2 = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline2 = storageContainerLocationClient
+        .allocateContainer(clientManager.getType(), clientManager.getFactor(),
+            containerName2, containerOwner);
+    XceiverClientSpi client2 = clientManager.acquireClient(pipeline2);
+    Assert.assertEquals(1, client2.getRefcount());
+    Assert.assertEquals(containerName2,
+        client2.getPipeline().getContainerName());
+    Assert.assertNotEquals(client1, client2);
+
+    // Now client1 should have been evicted from the cache.
+    XceiverClientSpi nonExistent = cache.getIfPresent(containerName1);
+    Assert.assertEquals(null, nonExistent);
+
+    // Any container operation should now fail
+    String traceID2 = "trace" + RandomStringUtils.randomNumeric(4);
+    exception.expect(IOException.class);
+    exception.expectMessage("This channel is not connected.");
+    ContainerProtocolCalls.createContainer(client1, traceID2);
+    clientManager.releaseClient(client2);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientMetrics.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientMetrics.java
new file mode 100644
index 0000000..1403f89
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientMetrics.java
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.container.ContainerTestHelper;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientMetrics;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * This class tests the metrics of XceiverClient.
+ */
+public class TestXceiverClientMetrics {
+  // Flag used to tell the async-request sender thread to stop.
+  private volatile boolean breakFlag;
+  private CountDownLatch latch;
+
+  private static OzoneConfiguration config;
+  private static MiniOzoneCluster cluster;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private static String containerOwner = "OZONE";
+
+  @BeforeClass
+  public static void init() throws Exception {
+    config = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(config).build();
+    cluster.waitForClusterToBeReady();
+    storageContainerLocationClient = cluster
+        .getStorageContainerLocationClient();
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    cluster.shutdown();
+  }
+
+  @Test
+  public void testMetrics() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    XceiverClientManager clientManager = new XceiverClientManager(conf);
+
+    String containerName = "container" + RandomStringUtils.randomNumeric(10);
+    Pipeline pipeline = storageContainerLocationClient
+        .allocateContainer(clientManager.getType(), clientManager.getFactor(),
+            containerName, containerOwner);
+    XceiverClientSpi client = clientManager.acquireClient(pipeline);
+
+    ContainerCommandRequestProto request = ContainerTestHelper
+        .getCreateContainerRequest(containerName, pipeline);
+    client.sendCommand(request);
+
+    MetricsRecordBuilder containerMetrics = getMetrics(
+        XceiverClientMetrics.SOURCE_NAME);
+    // The above command was sent synchronously, so there should be no
+    // pending requests.
+    assertCounter("PendingOps", 0L, containerMetrics);
+    assertCounter("numPendingCreateContainer", 0L, containerMetrics);
+    // the op count of the create-container latency metric should be incremented
+    assertCounter("CreateContainerLatencyNumOps", 1L, containerMetrics);
+
+    breakFlag = false;
+    latch = new CountDownLatch(1);
+
+    int numRequest = 10;
+    List<CompletableFuture<ContainerCommandResponseProto>> computeResults
+        = new ArrayList<>();
+    // start new thread to send async requests
+    Thread sendThread = new Thread(() -> {
+      while (!breakFlag) {
+        try {
+          // use async interface for testing pending metrics
+          for (int i = 0; i < numRequest; i++) {
+            String keyName = OzoneUtils.getRequestID();
+            ContainerProtos.ContainerCommandRequestProto smallFileRequest;
+
+            smallFileRequest = ContainerTestHelper.getWriteSmallFileRequest(
+                client.getPipeline(), containerName, keyName, 1024);
+            CompletableFuture<ContainerProtos.ContainerCommandResponseProto>
+                response = client.sendCommandAsync(smallFileRequest);
+            computeResults.add(response);
+          }
+
+          Thread.sleep(1000);
+        } catch (Exception ignored) {
+        }
+      }
+
+      latch.countDown();
+    });
+    sendThread.start();
+
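+    // Poll until the pending-op metrics show at least one outstanding async request.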
+    GenericTestUtils.waitFor(() -> {
+      // check if pending metric count is increased
+      MetricsRecordBuilder metric =
+          getMetrics(XceiverClientMetrics.SOURCE_NAME);
+      long pendingOps = getLongCounter("PendingOps", metric);
+      long pendingPutSmallFileOps =
+          getLongCounter("numPendingPutSmallFile", metric);
+
+      if (pendingOps > 0 && pendingPutSmallFileOps > 0) {
+        // signal the sender thread to stop
+        breakFlag = true;
+        return true;
+      } else {
+        return false;
+      }
+    }, 100, 60000);
+
+    // Block until the sender thread has stopped.
+    latch.await();
+    // Wait for all futures to complete.
+    GenericTestUtils.waitFor(() -> {
+      for (CompletableFuture future : computeResults) {
+        if (!future.isDone()) {
+          return false;
+        }
+      }
+
+      return true;
+    }, 100, 60000);
+
+    // the pending metrics should have dropped back to 0
+    containerMetrics = getMetrics(XceiverClientMetrics.SOURCE_NAME);
+    assertCounter("PendingOps", 0L, containerMetrics);
+    assertCounter("numPendingPutSmallFile", 0L, containerMetrics);
+
+    clientManager.close();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/node/TestQueryNode.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/node/TestQueryNode.java
new file mode 100644
index 0000000..b999c92
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/node/TestQueryNode.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.scm.node;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.EnumSet;
+import java.util.concurrent.TimeUnit;
+
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.HEALTHY;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_DEADNODE_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .OZONE_SCM_STALENODE_INTERVAL;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test Query Node Operation.
+ */
+public class TestQueryNode {
+  private static int numOfDatanodes = 5;
+  private MiniOzoneCluster cluster;
+
+  private ContainerOperationClient scmClient;
+
+  @Before
+  public void setUp() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    final int interval = 100;
+
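+    // Shorten heartbeat, stale, and dead intervals so node-state transitions
+    // happen quickly in the test.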
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL,
+        interval, TimeUnit.MILLISECONDS);
+    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, 1, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, SECONDS);
+    conf.setTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, 6, SECONDS);
+
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(numOfDatanodes)
+        .build();
+    cluster.waitForClusterToBeReady();
+    scmClient = new ContainerOperationClient(cluster
+        .getStorageContainerLocationClient(),
+        new XceiverClientManager(conf));
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testHealthyNodesCount() throws Exception {
+    HddsProtos.NodePool pool = scmClient.queryNode(
+        EnumSet.of(HEALTHY),
+        HddsProtos.QueryScope.CLUSTER, "");
+    assertEquals("Expected  live nodes", numOfDatanodes,
+        pool.getNodesCount());
+  }
+
+  @Test(timeout = 10 * 1000L)
+  public void testStaleNodesCount() throws Exception {
+    cluster.shutdownHddsDatanode(0);
+    cluster.shutdownHddsDatanode(1);
+
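+    // The two stopped datanodes should first be reported as STALE, then as DEAD.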
+    GenericTestUtils.waitFor(() ->
+            cluster.getStorageContainerManager().getNodeCount(STALE) == 2,
+        100, 4 * 1000);
+
+    int nodeCount = scmClient.queryNode(EnumSet.of(STALE),
+        HddsProtos.QueryScope.CLUSTER, "").getNodesCount();
+    assertEquals("Mismatch of expected nodes count", 2, nodeCount);
+
+    GenericTestUtils.waitFor(() ->
+            cluster.getStorageContainerManager().getNodeCount(DEAD) == 2,
+        100, 4 * 1000);
+
+    // Assert that we don't find any stale nodes.
+    nodeCount = scmClient.queryNode(EnumSet.of(STALE),
+        HddsProtos.QueryScope.CLUSTER, "").getNodesCount();
+    assertEquals("Mismatch of expected nodes count", 0, nodeCount);
+
+    // Assert that we find the expected number of dead nodes.
+    nodeCount = scmClient.queryNode(EnumSet.of(DEAD),
+        HddsProtos.QueryScope.CLUSTER, "").getNodesCount();
+    assertEquals("Mismatch of expected nodes count", 2, nodeCount);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
new file mode 100644
index 0000000..1015ae1
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.TestOzoneHelper;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.Rule;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.junit.Assert;
+
+import org.junit.rules.Timeout;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * Test ozone volume in the distributed storage handler scenario.
+ */
+public class TestDistributedOzoneVolumes extends TestOzoneHelper {
+  private static final org.slf4j.Logger LOG =
+      LoggerFactory.getLogger(TestDistributedOzoneVolumes.class);
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster cluster = null;
+  private static int port = 0;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.DEBUG);
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * Creates Volumes on Ozone Store.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumes() throws IOException {
+    super.testCreateVolumes(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * Create Volumes with Quota.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithQuota() throws IOException {
+    super.testCreateVolumesWithQuota(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * Create Volumes with Invalid Quota.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithInvalidQuota() throws IOException {
+    super.testCreateVolumesWithInvalidQuota(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * To create a volume, a user name must be specified using the OZONE_USER
+   * header. This test verifies that we get an error if the call is made
+   * without an OZONE user name.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithInvalidUser() throws IOException {
+    super.testCreateVolumesWithInvalidUser(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * Only admins can create volumes in Ozone. This test uses the simple
+   * userauth backend, in which hdfs and root are admin users.
+   * <p>
+   * This test tries to create a volume as the user bilbo.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithOutAdminRights() throws IOException {
+    super.testCreateVolumesWithOutAdminRights(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * Create a bunch of volumes in a loop.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesInLoop() throws IOException {
+    super.testCreateVolumesInLoop(port);
+    Assert.assertEquals(0, cluster.getKeySpaceManager()
+        .getMetrics().getNumVolumeCreateFails());
+  }
+
+  /**
+   * Get volumes owned by the user.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesByUser() throws IOException {
+    super.testGetVolumesByUser(port);
+  }
+
+  /**
+   * Admins can read volumes belonging to other users.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesOfAnotherUser() throws IOException {
+    super.testGetVolumesOfAnotherUser(port);
+  }
+
+  /**
+   * If you try to read volumes belonging to another user,
+   * the server always rejects the request.
+   *
+   * @throws IOException
+   */
+  public void testGetVolumesOfAnotherUserShouldFail() throws IOException {
+    super.testGetVolumesOfAnotherUserShouldFail(port);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
new file mode 100644
index 0000000..922587e
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.TestOzoneHelper;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+
+/**
+ * Test ozone volume in the local storage handler scenario.
+ */
+public class TestLocalOzoneVolumes extends TestOzoneHelper {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster cluster = null;
+  private static int port = 0;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "local", which uses a local directory to
+   * emulate the Ozone backend.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    String path = GenericTestUtils
+        .getTempPath(TestLocalOzoneVolumes.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.DEBUG);
+
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * Creates Volumes on Ozone Store.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumes() throws IOException {
+    super.testCreateVolumes(port);
+  }
+
+  /**
+   * Create Volumes with Quota.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithQuota() throws IOException {
+    super.testCreateVolumesWithQuota(port);
+  }
+
+  /**
+   * Create Volumes with Invalid Quota.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithInvalidQuota() throws IOException {
+    super.testCreateVolumesWithInvalidQuota(port);
+  }
+
+  /**
+   * To create a volume, a user name must be specified using the OZONE_USER
+   * header. This test verifies that we get an error if the call is made
+   * without an OZONE user name.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithInvalidUser() throws IOException {
+    super.testCreateVolumesWithInvalidUser(port);
+  }
+
+  /**
+   * Only admins can create volumes in Ozone. This test uses the simple
+   * userauth backend, in which hdfs and root are admin users.
+   * <p>
+   * This test tries to create a volume as the user bilbo.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testCreateVolumesWithOutAdminRights() throws IOException {
+    super.testCreateVolumesWithOutAdminRights(port);
+  }
+
+  /**
+   * Create a bunch of volumes in a loop.
+   *
+   * @throws IOException
+   */
+  //@Test
+  public void testCreateVolumesInLoop() throws IOException {
+    super.testCreateVolumesInLoop(port);
+  }
+
+  /**
+   * Get volumes owned by the user.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testGetVolumesByUser() throws IOException {
+    super.testGetVolumesByUser(port);
+  }
+
+  /**
+   * Admins can read volumes belonging to other users.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testGetVolumesOfAnotherUser() throws IOException {
+    super.testGetVolumesOfAnotherUser(port);
+  }
+
+  /**
+   * If you try to read volumes belonging to another user,
+   * the server always rejects the request.
+   *
+   * @throws IOException
+   */
+  @Test @Ignore
+  public void testGetVolumesOfAnotherUserShouldFail() throws IOException {
+    super.testGetVolumesOfAnotherUserShouldFail(port);
+  }
+
+  @Test
+  public void testListKeyOnEmptyBucket() throws IOException {
+    super.testListKeyOnEmptyBucket(port);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneRestWithMiniCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneRestWithMiniCluster.java
new file mode 100644
index 0000000..5b67657
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneRestWithMiniCluster.java
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import static com.google.common.base.Charsets.UTF_8;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
+import static org.apache.hadoop.ozone.OzoneConsts.CHUNK_SIZE;
+import static org.junit.Assert.*;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.web.client.OzoneRestClient;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.junit.rules.Timeout;
+
+/**
+ * End-to-end testing of Ozone REST operations.
+ */
+public class TestOzoneRestWithMiniCluster {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration conf;
+  private static OzoneRestClient ozoneClient;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @BeforeClass
+  public static void init() throws Exception {
+    conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    int port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+    ozoneClient = new OzoneRestClient(
+        String.format("http://localhost:%d", port));
+    ozoneClient.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+  }
+
+  @AfterClass
+  public static void shutdown() throws InterruptedException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanupWithLogger(null, ozoneClient);
+  }
+
+  @Test
+  public void testCreateAndGetVolume() throws Exception {
+    String volumeName = nextId("volume");
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    volume = ozoneClient.getVolume(volumeName);
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+  }
+
+  @Test
+  public void testCreateAndGetBucket() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket = volume.getBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+  }
+
+  @Test
+  public void testPutAndGetKey() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    String keyName = nextId("key");
+    String keyData = nextId("data");
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+  }
+
+  @Test
+  public void testPutAndGetEmptyKey() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    String keyName = nextId("key");
+    String keyData = "";
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+  }
+
+  @Test
+  public void testPutAndGetMultiChunkKey() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    String keyName = nextId("key");
+    int keyDataLen = 3 * CHUNK_SIZE;
+    String keyData = buildKeyData(keyDataLen);
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+  }
+
+  @Test
+  public void testPutAndGetMultiChunkKeyLastChunkPartial() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    String keyName = nextId("key");
+    int keyDataLen = (int)(2.5 * CHUNK_SIZE);
+    String keyData = buildKeyData(keyDataLen);
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+  }
+
+  @Test
+  public void testReplaceKey() throws Exception {
+    String volumeName = nextId("volume");
+    String bucketName = nextId("bucket");
+    String keyName = nextId("key");
+    int keyDataLen = (int)(2.5 * CHUNK_SIZE);
+    String keyData = buildKeyData(keyDataLen);
+    OzoneVolume volume = ozoneClient.createVolume(volumeName, "bilbo", "100TB");
+    assertNotNull(volume);
+    assertEquals(volumeName, volume.getVolumeName());
+    assertEquals(ozoneClient.getUserAuth(), volume.getCreatedby());
+    assertEquals("bilbo", volume.getOwnerName());
+    assertNotNull(volume.getQuota());
+    assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(),
+        volume.getQuota().sizeInBytes());
+    OzoneBucket bucket = volume.createBucket(bucketName);
+    assertNotNull(bucket);
+    assertEquals(bucketName, bucket.getBucketName());
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+
+    // Replace key with data consisting of fewer chunks.
+    keyDataLen = (int)(1.5 * CHUNK_SIZE);
+    keyData = buildKeyData(keyDataLen);
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+
+    // Replace key with data consisting of more chunks.
+    keyDataLen = (int)(3.5 * CHUNK_SIZE);
+    keyData = buildKeyData(keyDataLen);
+    bucket.putKey(keyName, keyData);
+    assertEquals(keyData, bucket.getKey(keyName));
+  }
+
+  /**
+   * Creates sample key data of the specified length.  The data is a string of
+   * printable ASCII characters.  This makes it easy to debug through visual
+   * inspection of the chunk files if a test fails.
+   *
+   * @param keyDataLen desired length of key data
+   * @return string of printable ASCII characters of the specified length
+   */
+  private static String buildKeyData(int keyDataLen) {
+    return new String(dataset(keyDataLen, 33, 93), UTF_8);
+  }
+
+  /**
+   * Generates identifiers unique enough for use in tests, so that individual
+   * tests don't collide on each others' data in the shared mini-cluster.
+   *
+   * @param idPrefix prefix to put in front of ID
+   * @return unique ID generated by appending a suffix to the given prefix
+   */
+  private static String nextId(String idPrefix) {
+    return (idPrefix + RandomStringUtils.random(5, true, true)).toLowerCase();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
new file mode 100644
index 0000000..6c32f07
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import javax.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Locale;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.getRequestID;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test Ozone Access through REST protocol.
+ */
+public class TestOzoneWebAccess {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster cluster;
+  private static int port;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   *
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "local", which uses a local directory to
+   * emulate the Ozone backend.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    String path = GenericTestUtils
+        .getTempPath(TestOzoneWebAccess.class.getSimpleName());
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * Send a valid Ozone request.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testOzoneRequest() throws IOException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    CloseableHttpClient client = HttpClients.createDefault();
+    String volumeName = getRequestID().toLowerCase(Locale.US);
+    try {
+      HttpPost httppost = new HttpPost(
+          String.format("http://localhost:%d/%s", port, volumeName));
+
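+      // This request carries the protocol version header, a Date header and
+      // the simple-authentication headers identifying the user; the volume
+      // name itself is carried in the request path above.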
+      httppost.addHeader(Header.OZONE_VERSION_HEADER,
+          Header.OZONE_V1_VERSION_HEADER);
+      httppost.addHeader(HttpHeaders.DATE,
+          format.format(new Date(Time.now())));
+      httppost.addHeader(HttpHeaders.AUTHORIZATION,
+          Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " " +
+              OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      httppost.addHeader(Header.OZONE_USER, OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+      HttpResponse response = client.execute(httppost);
+      assertEquals(response.toString(), HTTP_CREATED,
+          response.getStatusLine().getStatusCode());
+    } finally {
+      client.close();
+    }
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
new file mode 100644
index 0000000..46539e7
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
@@ -0,0 +1,248 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.text.ParseException;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test Ozone Bucket Lifecycle.
+ */
+public class TestBuckets {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneRestClient ozoneRestClient = null;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "local", which uses a local directory to
+   * emulate the Ozone backend.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws IOException,
+      URISyntaxException, OzoneException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    String path = GenericTestUtils
+        .getTempPath(TestBuckets.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    final int port = cluster.getHddsDatanodes().get(0).getDatanodeDetails()
+        .getOzoneRestPort();
+    ozoneRestClient = new OzoneRestClient(
+        String.format("http://localhost:%d", port));
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testCreateBucket() throws Exception {
+    runTestCreateBucket(ozoneRestClient);
+  }
+
+  static void runTestCreateBucket(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth("hdfs");
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+
+    // create 10 buckets under same volume
+    for (int x = 0; x < 10; x++) {
+      long currentTime = Time.now();
+      String bucketName = OzoneUtils.getRequestID().toLowerCase();
+      OzoneBucket bucket =
+          vol.createBucket(bucketName, acls, StorageType.DEFAULT);
+      assertEquals(bucket.getBucketName(), bucketName);
+
+      // verify the bucket creation time
+      assertTrue((OzoneUtils.formatDate(bucket.getCreatedOn())
+          / 1000) >= (currentTime / 1000));
+    }
+    client.close();
+
+    assertEquals(vol.getVolumeName(), volumeName);
+    assertEquals(vol.getCreatedby(), "hdfs");
+    assertEquals(vol.getOwnerName(), "bilbo");
+    assertEquals(vol.getQuota().getUnit(), OzoneQuota.Units.TB);
+    assertEquals(vol.getQuota().getSize(), 100);
+
+    // Test creating a bucket with an invalid bucket name. We do not use a
+    // JUnit Rule here because the test method is static.
+    try {
+      String invalidBucketName = "#" + OzoneUtils.getRequestID().toLowerCase();
+      vol.createBucket(invalidBucketName, acls, StorageType.DEFAULT);
+      fail("Except the bucket creation to be failed because the"
+          + " bucket name starts with an invalid char #");
+    } catch (Exception e) {
+      assertTrue(e instanceof OzoneRestClientException);
+      assertTrue(e.getMessage().contains("Bucket or Volume name"
+          + " has an unsupported character : #"));
+    }
+  }
+
+  @Test
+  public void testAddBucketAcls() throws Exception {
+    runTestAddBucketAcls(ozoneRestClient);
+  }
+
+  static void runTestAddBucketAcls(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth("hdfs");
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+    String bucketName = OzoneUtils.getRequestID().toLowerCase();
+    vol.createBucket(bucketName);
+    vol.addAcls(bucketName, acls);
+    OzoneBucket updatedBucket = vol.getBucket(bucketName);
+    assertEquals(updatedBucket.getAcls().size(), 2);
+    // verify that the creation time is still valid after the update operation
+    assertTrue(
+        (OzoneUtils.formatDate(updatedBucket.getCreatedOn()) / 1000) >= 0);
+    client.close();
+  }
+
+  @Test
+  public void testRemoveBucketAcls() throws Exception {
+    runTestRemoveBucketAcls(ozoneRestClient);
+  }
+
+  static void runTestRemoveBucketAcls(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth("hdfs");
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+    String bucketName = OzoneUtils.getRequestID().toLowerCase();
+    OzoneBucket bucket = vol.createBucket(bucketName, acls);
+    assertEquals(bucket.getAcls().size(), 2);
+    vol.removeAcls(bucketName, acls);
+    OzoneBucket updatedBucket = vol.getBucket(bucketName);
+
+    // We removed all acls
+    assertEquals(updatedBucket.getAcls().size(), 0);
+    // verify that the creation time is still valid after the update operation
+    assertTrue(
+        (OzoneUtils.formatDate(updatedBucket.getCreatedOn()) / 1000) >= 0);
+    client.close();
+  }
+
+  @Test
+  public void testDeleteBucket() throws OzoneException, IOException {
+    runTestDeleteBucket(ozoneRestClient);
+  }
+
+  static void runTestDeleteBucket(OzoneRestClient client)
+      throws OzoneException, IOException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth("hdfs");
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+    String bucketName = OzoneUtils.getRequestID().toLowerCase();
+    vol.createBucket(bucketName, acls);
+    vol.deleteBucket(bucketName);
+    try {
+      vol.getBucket(bucketName);
+      fail("Fetching a deleted bucket should have thrown.");
+    } catch (Exception ex) {
+      // must throw
+      assertNotNull(ex);
+    }
+    client.close();
+  }
+
+  @Test
+  public void testListBucket() throws Exception {
+    runTestListBucket(ozoneRestClient);
+  }
+
+  static void runTestListBucket(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth("hdfs");
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+
+    long currentTime = Time.now();
+    for (int x = 0; x < 10; x++) {
+      String bucketName = "listbucket-test-" + x;
+      vol.createBucket(bucketName, acls);
+    }
+    List<OzoneBucket> bucketList = vol.listBuckets("100", null, null);
+    assertEquals(bucketList.size(), 10);
+
+    for (OzoneBucket bucket : bucketList) {
+      assertTrue((OzoneUtils.formatDate(bucket.getCreatedOn())
+          / 1000) >= (currentTime / 1000));
+    }
+
+    bucketList = vol.listBuckets("3", null, null);
+    assertEquals(bucketList.size(), 3);
+
+    bucketList = vol.listBuckets("100", "listbucket-test-4", null);
+    assertEquals(bucketList.size(), 5);
+
+    bucketList = vol.listBuckets("100", null, "listbucket-test-3");
+    assertEquals(bucketList.size(), 1);
+
+    client.close();
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBucketsRatis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBucketsRatis.java
new file mode 100644
index 0000000..b913a86
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBucketsRatis.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.hadoop.ozone.RatisTestHelper;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+
+/** The same as {@link TestBuckets} except that this test is Ratis enabled. */
+@Ignore("Disabling Ratis tests for pipeline work.")
+public class TestBucketsRatis {
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static RatisTestHelper.RatisTestSuite suite;
+  private static OzoneRestClient ozoneRestClient;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    suite = new RatisTestHelper.RatisTestSuite(TestBucketsRatis.class);
+    ozoneRestClient = suite.newOzoneRestClient();
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    if (suite != null) {
+      suite.close();
+    }
+  }
+
+  @Test
+  public void testCreateBucket() throws Exception {
+    TestBuckets.runTestCreateBucket(ozoneRestClient);
+  }
+
+  @Test
+  public void testAddBucketAcls() throws Exception {
+    TestBuckets.runTestAddBucketAcls(ozoneRestClient);
+  }
+
+  @Test
+  public void testRemoveBucketAcls() throws Exception {
+    TestBuckets.runTestRemoveBucketAcls(ozoneRestClient);
+  }
+
+  @Test
+  public void testDeleteBucket() throws OzoneException, IOException {
+    TestBuckets.runTestDeleteBucket(ozoneRestClient);
+  }
+
+  @Test
+  public void testListBucket() throws Exception {
+    TestBuckets.runTestListBucket(ozoneRestClient);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
new file mode 100644
index 0000000..ae30fb3
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
@@ -0,0 +1,688 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.helpers.KeyData;
+import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
+import org.apache.hadoop.ozone.ksm.KeySpaceManager;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos
+    .Status;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test Ozone Key Lifecycle.
+ */
+public class TestKeys {
+  /**
+   * Set the timeout for every test.
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+
+  private static MiniOzoneCluster ozoneCluster = null;
+  private static String path;
+  private static OzoneRestClient ozoneRestClient = null;
+  private static long currentTime;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    // Set short block deleting service interval to speed up deletions.
+    conf.setTimeDuration(OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+        1000, TimeUnit.MILLISECONDS);
+
+    path = GenericTestUtils.getTempPath(TestKeys.class.getSimpleName());
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.DEBUG);
+
+    ozoneCluster = MiniOzoneCluster.newBuilder(conf).build();
+    ozoneCluster.waitForClusterToBeReady();
+    final int port = ozoneCluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+    ozoneRestClient = new OzoneRestClient(
+        String.format("http://localhost:%d", port));
+    currentTime = Time.now();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (ozoneCluster != null) {
+      ozoneCluster.shutdown();
+    }
+  }
+
+  /**
+   * Creates a file with random data.
+   *
+   * @return File.
+   */
+  static File createRandomDataFile(String dir, String fileName, long size)
+      throws IOException {
+    File tmpDir = new File(dir);
+    FileUtils.forceMkdir(tmpDir);
+    File tmpFile = new File(tmpDir, fileName);
+    try (FileOutputStream randFile = new FileOutputStream(tmpFile)) {
+      Random r = new Random();
+      for (int x = 0; x < size; x++) {
+        char c = (char) (r.nextInt(26) + 'a');
+        randFile.write(c);
+      }
+
+    } catch (IOException e) {
+      fail(e.getMessage());
+    }
+    return tmpFile;
+  }
+
+  /**
+   * Generates a multi-part key whose parts are separated by the given
+   * delimiter. Each part is a random alphanumeric string of length 0 to 4,
+   * and the key has between 1 and 5 parts; a single-part key is exactly
+   * 5 characters long.
+   *
+   * @param delimiter delimiter used to separate the parts of the key
+   * @return key composed of multiple parts joined by the given delimiter
+   */
+  static String getMultiPartKey(String delimiter) {
+    int numParts = RandomUtils.nextInt(5) + 1;
+    String[] nameParts = new String[numParts];
+    for (int i = 0; i < numParts; i++) {
+      int stringLength = numParts == 1 ? 5 : RandomUtils.nextInt(5);
+      nameParts[i] = RandomStringUtils.randomAlphanumeric(stringLength);
+    }
+    return StringUtils.join(delimiter, nameParts);
+  }
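+
+  // For example (illustrative only), getMultiPartKey("/") could return
+  // "ab12/x/9yz0", while a single-part key is a plain five-character string
+  // such as "k3m7q". Delimited keys are then stored like any other key:
+  //
+  //   String key = getMultiPartKey("/");
+  //   bucket.putKey(key, file);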
+
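+  /**
+   * Helper that creates a volume and a bucket (owned by "bilbo") and puts a
+   * single key backed by a random 1 KB data file, so that individual tests
+   * can focus on the operation under test.
+   */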
+  static class PutHelper {
+    private final OzoneRestClient client;
+    private final String dir;
+    private final String keyName;
+
+    private OzoneVolume vol;
+    private OzoneBucket bucket;
+    private File file;
+
+    PutHelper(OzoneRestClient client, String dir) {
+      this(client, dir, OzoneUtils.getRequestID().toLowerCase());
+    }
+
+    PutHelper(OzoneRestClient client, String dir, String key) {
+      this.client = client;
+      this.dir = dir;
+      this.keyName = key;
+    }
+
+    public OzoneVolume getVol() {
+      return vol;
+    }
+
+    public OzoneBucket getBucket() {
+      return bucket;
+    }
+
+    public File getFile() {
+      return file;
+    }
+
+    /**
+     * This function is reused in all other tests.
+     *
+     * @return the {@link KsmKeyArgs} describing the new key that was created.
+     * @throws OzoneException
+     */
+    private KsmKeyArgs putKey() throws Exception {
+      String volumeName = OzoneUtils.getRequestID().toLowerCase();
+      client.setUserAuth("hdfs");
+
+      vol = client.createVolume(volumeName, "bilbo", "100TB");
+      String[] acls = {"user:frodo:rw", "user:samwise:rw"};
+
+      String bucketName = OzoneUtils.getRequestID().toLowerCase();
+      bucket = vol.createBucket(bucketName, acls, StorageType.DEFAULT);
+
+      String fileName = OzoneUtils.getRequestID().toLowerCase();
+
+      file = createRandomDataFile(dir, fileName, 1024);
+
+      bucket.putKey(keyName, file);
+      return new KsmKeyArgs.Builder()
+          .setKeyName(keyName)
+          .setVolumeName(volumeName)
+          .setBucketName(bucketName)
+          .setDataSize(1024)
+          .build();
+    }
+  }
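+
+  // Typical use of PutHelper in the tests below (illustrative sketch;
+  // "somePath" is a placeholder):
+  //
+  //   PutHelper helper = new PutHelper(ozoneRestClient, path);
+  //   KsmKeyArgs args = helper.putKey();  // creates volume, bucket and key
+  //   helper.getBucket().getKey(args.getKeyName(), somePath);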
+
+  @Test
+  public void testPutKey() throws Exception {
+    // Test non-delimited keys
+    runTestPutKey(new PutHelper(ozoneRestClient, path));
+    // Test key delimited by a random delimiter
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestPutKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  static void runTestPutKey(PutHelper helper) throws Exception {
+    final OzoneRestClient client = helper.client;
+    helper.putKey();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+    List<OzoneKey> keyList = helper.getBucket().listKeys("100", null, null);
+    Assert.assertEquals(1, keyList.size());
+
+    // test put key using a more efficient call
+    String newkeyName = OzoneUtils.getRequestID().toLowerCase();
+    client.putKey(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), newkeyName, helper.getFile());
+    keyList = helper.getBucket().listKeys("100", null, null);
+    Assert.assertEquals(2, keyList.size());
+
+    // test new put key with invalid volume/bucket name
+    try {
+      client.putKey("invalid-volume",
+          helper.getBucket().getBucketName(), newkeyName, helper.getFile());
+      fail("Put key should have thrown"
+          + " when using invalid volume name.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          Status.VOLUME_NOT_FOUND.toString(), e);
+    }
+
+    try {
+      client.putKey(helper.getVol().getVolumeName(), "invalid-bucket",
+          newkeyName, helper.getFile());
+      fail("Put key should have thrown "
+          + "when using invalid bucket name.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          Status.BUCKET_NOT_FOUND.toString(), e);
+    }
+  }
+
+  private static void restartDatanode(
+      MiniOzoneCluster cluster, int datanodeIdx, OzoneRestClient client)
+      throws OzoneException, URISyntaxException {
+    cluster.restartHddsDatanode(datanodeIdx);
+    // refresh the datanode endpoint uri after datanode restart
+    final int port = cluster.getHddsDatanodes().get(datanodeIdx)
+        .getDatanodeDetails().getOzoneRestPort();
+    client.setEndPoint(String.format("http://localhost:%d", port));
+  }
+
+  @Test
+  public void testPutAndGetKeyWithDnRestart() throws Exception {
+    runTestPutAndGetKeyWithDnRestart(
+        new PutHelper(ozoneRestClient, path), ozoneCluster);
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestPutAndGetKeyWithDnRestart(
+        new PutHelper(ozoneRestClient, path,
+            getMultiPartKey(delimiter)), ozoneCluster);
+  }
+
+  static void runTestPutAndGetKeyWithDnRestart(
+      PutHelper helper, MiniOzoneCluster cluster) throws Exception {
+    String keyName = helper.putKey().getKeyName();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+
+    // restart the datanode
+    restartDatanode(cluster, 0, helper.client);
+    // verify getKey after the datanode restart
+    String newFileName = helper.dir + "/"
+        + OzoneUtils.getRequestID().toLowerCase();
+    Path newPath = Paths.get(newFileName);
+
+    helper.getBucket().getKey(keyName, newPath);
+
+    try (
+        FileInputStream original = new FileInputStream(helper.getFile());
+        FileInputStream downloaded = new FileInputStream(newPath.toFile())) {
+      String originalHash = DigestUtils.sha256Hex(original);
+      String downloadedHash = DigestUtils.sha256Hex(downloaded);
+      assertEquals(
+          "Sha256 does not match between original file and downloaded file.",
+          originalHash, downloadedHash);
+    }
+  }
+
+  @Test
+  public void testPutAndGetKey() throws Exception {
+    runTestPutAndGetKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestPutAndGetKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  static void runTestPutAndGetKey(PutHelper helper) throws Exception {
+    final OzoneRestClient client = helper.client;
+
+    String keyName = helper.putKey().getKeyName();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+
+    final String newFileName1 = helper.dir + "/"
+        + OzoneUtils.getRequestID().toLowerCase();
+    final String newFileName2 = helper.dir + "/"
+        + OzoneUtils.getRequestID().toLowerCase();
+
+    Path newPath1 = Paths.get(newFileName1);
+    Path newPath2 = Paths.get(newFileName2);
+
+    helper.getBucket().getKey(keyName, newPath1);
+    // test get key using a more efficient call
+    client.getKey(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), keyName, newPath2);
+
+    try (FileInputStream original = new FileInputStream(helper.getFile());
+        FileInputStream downloaded1 = new FileInputStream(newPath1.toFile());
+        FileInputStream downloaded2 = new FileInputStream(newPath2.toFile())) {
+      String originalHash = DigestUtils.sha256Hex(original);
+      String downloadedHash1 = DigestUtils.sha256Hex(downloaded1);
+      String downloadedHash2 = DigestUtils.sha256Hex(downloaded2);
+
+      assertEquals(
+          "Sha256 does not match between original file and downloaded file.",
+          originalHash, downloadedHash1);
+      assertEquals(
+          "Sha256 does not match between original file and downloaded file.",
+          originalHash, downloadedHash2);
+
+      // test new get key with invalid volume/bucket name
+      try {
+        client.getKey("invalid-volume", helper.getBucket().getBucketName(),
+            keyName, newPath1);
+        fail("Get key should have thrown " + "when using invalid volume name.");
+      } catch (OzoneException e) {
+        GenericTestUtils
+            .assertExceptionContains(Status.KEY_NOT_FOUND.toString(), e);
+      }
+
+      try {
+        client.getKey(helper.getVol().getVolumeName(), "invalid-bucket",
+            keyName, newPath1);
+        fail("Get key should have thrown " + "when using invalid bucket name.");
+      } catch (OzoneException e) {
+        GenericTestUtils.assertExceptionContains(
+            Status.KEY_NOT_FOUND.toString(), e);
+      }
+    }
+  }
+
+  @Test
+  public void testPutAndDeleteKey() throws Exception {
+    runTestPutAndDeleteKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestPutAndDeleteKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  static void runTestPutAndDeleteKey(PutHelper helper) throws Exception {
+    String keyName = helper.putKey().getKeyName();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+    helper.getBucket().deleteKey(keyName);
+
+    try {
+      helper.getBucket().getKey(keyName);
+      fail("Get Key on a deleted key should have thrown");
+    } catch (OzoneException ex) {
+      GenericTestUtils.assertExceptionContains(
+          Status.KEY_NOT_FOUND.toString(), ex);
+    }
+  }
+
+  @Test
+  public void testPutAndListKey() throws Exception {
+    runTestPutAndListKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestPutAndListKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  static void runTestPutAndListKey(PutHelper helper) throws Exception {
+    final OzoneRestClient client = helper.client;
+    helper.putKey();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+
+    // add keys [list-key0, list-key1, ..., list-key9]
+    for (int x = 0; x < 10; x++) {
+      String newkeyName = "list-key" + x;
+      helper.getBucket().putKey(newkeyName, helper.getFile());
+    }
+
+    List<OzoneKey> keyList1 = helper.getBucket().listKeys("100", null, null);
+    // test list key using a more efficient call
+    List<OzoneKey> keyList2 = client.listKeys(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), "100", null, null);
+
+    Assert.assertEquals(11, keyList1.size());
+    Assert.assertEquals(11, keyList2.size());
+    // Verify the key creation/modification time. We compare at second
+    // granularity, since re-parsing the date string back to milliseconds
+    // loses precision.
+    for (OzoneKey key : keyList1) {
+      assertTrue((OzoneUtils.formatDate(key.getObjectInfo().getCreatedOn())
+          / 1000) >= (currentTime / 1000));
+      assertTrue((OzoneUtils.formatDate(key.getObjectInfo().getModifiedOn())
+          / 1000) >= (currentTime / 1000));
+    }
+
+    for (OzoneKey key : keyList2) {
+      assertTrue((OzoneUtils.formatDate(key.getObjectInfo().getCreatedOn())
+          / 1000) >= (currentTime / 1000));
+      assertTrue((OzoneUtils.formatDate(key.getObjectInfo().getModifiedOn())
+          / 1000) >= (currentTime / 1000));
+    }
+
+    // test maxLength parameter of list keys
+    keyList1 = helper.getBucket().listKeys("1", null, null);
+    keyList2 = client.listKeys(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), "1", null, null);
+    Assert.assertEquals(1, keyList1.size());
+    Assert.assertEquals(1, keyList2.size());
+
+    // test startKey parameter of list keys
+    keyList1 = helper.getBucket().listKeys("100", "list-key4", "list-key");
+    keyList2 = client.listKeys(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), "100", "list-key4", "list-key");
+    Assert.assertEquals(5, keyList1.size());
+    Assert.assertEquals(5, keyList2.size());
+
+    // test prefix parameter of list keys
+    keyList1 = helper.getBucket().listKeys("100", null, "list-key2");
+    keyList2 = client.listKeys(helper.getVol().getVolumeName(),
+        helper.getBucket().getBucketName(), "100", null, "list-key2");
+    Assert.assertTrue(keyList1.size() == 1
+        && keyList1.get(0).getObjectInfo().getKeyName().equals("list-key2"));
+    Assert.assertTrue(keyList2.size() == 1
+        && keyList2.get(0).getObjectInfo().getKeyName().equals("list-key2"));
+
+    // test new list keys with invalid volume/bucket name
+    try {
+      client.listKeys("invalid-volume", helper.getBucket().getBucketName(),
+          "100", null, null);
+      fail("List keys should have thrown when using invalid volume name.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          Status.BUCKET_NOT_FOUND.toString(), e);
+    }
+
+    try {
+      client.listKeys(helper.getVol().getVolumeName(), "invalid-bucket", "100",
+          null, null);
+      fail("List keys should have thrown when using invalid bucket name.");
+    } catch (OzoneException e) {
+      GenericTestUtils.assertExceptionContains(
+          Status.BUCKET_NOT_FOUND.toString(), e);
+    }
+  }
+
+  @Test
+  public void testGetKeyInfo() throws Exception {
+    runTestGetKeyInfo(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAscii(1);
+    runTestGetKeyInfo(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  static void runTestGetKeyInfo(PutHelper helper) throws Exception {
+    String keyName = helper.putKey().getKeyName();
+    assertNotNull(helper.getBucket());
+    assertNotNull(helper.getFile());
+
+    OzoneKey keyInfo = helper.getBucket().getKeyInfo(keyName);
+    assertNotNull(keyInfo.getObjectInfo());
+    assertEquals(keyName, keyInfo.getObjectInfo().getKeyName());
+
+    // Compare at second granularity, since re-parsing the date string back
+    // to milliseconds loses precision.
+    Assert.assertTrue(
+        (OzoneUtils.formatDate(keyInfo.getObjectInfo().getCreatedOn())
+            / 1000) >= (currentTime / 1000));
+    Assert.assertTrue(
+        (OzoneUtils.formatDate(keyInfo.getObjectInfo().getModifiedOn())
+            / 1000) >= (currentTime / 1000));
+  }
+
+  // Tracks volume, bucket and key info used by the create/delete key tests.
+  private static class BucketKeys {
+
+    private Map<Pair<String, String>, List<String>> buckets;
+
+    BucketKeys() {
+      buckets = Maps.newHashMap();
+    }
+
+    void addKey(String volume, String bucket, String key) {
+      // check if this bucket exists
+      for (Map.Entry<Pair<String, String>, List<String>> entry :
+          buckets.entrySet()) {
+        if (entry.getKey().getValue().equals(bucket)) {
+          entry.getValue().add(key);
+          return;
+        }
+      }
+
+      // the bucket does not exist yet
+      Pair<String, String> newBucket = new ImmutablePair<>(volume, bucket);
+      List<String> keyList = Lists.newArrayList();
+      keyList.add(key);
+      buckets.put(newBucket, keyList);
+    }
+
+    Set<Pair<String, String>> getAllBuckets() {
+      return buckets.keySet();
+    }
+
+    List<String> getBucketKeys(String bucketName) {
+      for (Map.Entry<Pair<String, String>, List<String>> entry : buckets
+          .entrySet()) {
+        if (entry.getKey().getValue().equals(bucketName)) {
+          return entry.getValue();
+        }
+      }
+      return Lists.newArrayList();
+    }
+
+    int totalNumOfKeys() {
+      int count = 0;
+      for (Map.Entry<Pair<String, String>, List<String>> entry : buckets
+          .entrySet()) {
+        count += entry.getValue().size();
+      }
+      return count;
+    }
+  }
+
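+  /**
+   * Counts every key visible through KSM by listing all volumes, all buckets
+   * in each volume, and all keys in each bucket.
+   */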
+  private int countKsmKeys(KeySpaceManager ksm) throws IOException {
+    int totalCount = 0;
+    List<KsmVolumeArgs> volumes =
+        ksm.listAllVolumes(null, null, Integer.MAX_VALUE);
+    for (KsmVolumeArgs volume : volumes) {
+      List<KsmBucketInfo> buckets =
+          ksm.listBuckets(volume.getVolume(), null, null, Integer.MAX_VALUE);
+      for (KsmBucketInfo bucket : buckets) {
+        List<KsmKeyInfo> keys = ksm.listKeys(bucket.getVolumeName(),
+            bucket.getBucketName(), null, null, Integer.MAX_VALUE);
+        totalCount += keys.size();
+      }
+    }
+    return totalCount;
+  }
+
+  @Test
+  public void testDeleteKey() throws Exception {
+    KeySpaceManager ksm = ozoneCluster.getKeySpaceManager();
+    // To avoid interference from other test cases,
+    // we collect the number of existing keys at the beginning.
+    int numOfExistedKeys = countKsmKeys(ksm);
+
+    // Keep tracking bucket keys info while creating them
+    PutHelper helper = new PutHelper(ozoneRestClient, path);
+    BucketKeys bucketKeys = new BucketKeys();
+    for (int i = 0; i < 20; i++) {
+      KsmKeyArgs keyArgs = helper.putKey();
+      bucketKeys.addKey(keyArgs.getVolumeName(), keyArgs.getBucketName(),
+          keyArgs.getKeyName());
+    }
+
+    // There should be 20 keys in the buckets we just created.
+    Assert.assertEquals(20, bucketKeys.totalNumOfKeys());
+
+    int numOfCreatedKeys = 0;
+    OzoneContainer cm = ozoneCluster.getHddsDatanodes().get(0)
+        .getDatanodeStateMachine().getContainer();
+
+    // Chunk files that are expected to be deleted.
+    List<File> expectedChunkFiles = Lists.newArrayList();
+    // Iterate over all buckets, and list all keys in each bucket,
+    // count the total number of created keys.
+    Set<Pair<String, String>> buckets = bucketKeys.getAllBuckets();
+    for (Pair<String, String> buk : buckets) {
+      List<KsmKeyInfo> createdKeys =
+          ksm.listKeys(buk.getKey(), buk.getValue(), null, null, 20);
+
+      // Remember the chunks that have been created,
+      // so we can verify the actual deletions on the datanode side later.
+      for (KsmKeyInfo keyInfo : createdKeys) {
+        List<KsmKeyLocationInfo> locations =
+            keyInfo.getLatestVersionLocations().getLocationList();
+        for (KsmKeyLocationInfo location : locations) {
+          String containerName = location.getContainerName();
+          KeyData keyData = new KeyData(containerName, location.getBlockID());
+          KeyData blockInfo = cm.getContainerManager()
+              .getKeyManager().getKey(keyData);
+          ContainerData containerData = cm.getContainerManager()
+              .readContainer(containerName);
+          File dataDir = ContainerUtils
+              .getDataDirectory(containerData).toFile();
+          for (ContainerProtos.ChunkInfo chunkInfo : blockInfo.getChunks()) {
+            File chunkFile = dataDir.toPath()
+                .resolve(chunkInfo.getChunkName()).toFile();
+            System.out.println("Chunk File created: "
+                + chunkFile.getAbsolutePath());
+            Assert.assertTrue(chunkFile.exists());
+            expectedChunkFiles.add(chunkFile);
+          }
+        }
+      }
+      numOfCreatedKeys += createdKeys.size();
+    }
+
+    // Ensure all keys are created.
+    Assert.assertEquals(20, numOfCreatedKeys);
+
+    // Ensure all keys are visible from KSM.
+    // Total number should be numOfCreated + numOfExisted
+    Assert.assertEquals(20 + numOfExistedKeys, countKsmKeys(ksm));
+
+    // Delete all 20 keys
+    int delCount = 20;
+    Set<Pair<String, String>> allBuckets = bucketKeys.getAllBuckets();
+    for (Pair<String, String> bucketInfo : allBuckets) {
+      List<String> bks = bucketKeys.getBucketKeys(bucketInfo.getValue());
+      for (String keyName : bks) {
+        if (delCount > 0) {
+          KsmKeyArgs arg =
+              new KsmKeyArgs.Builder().setVolumeName(bucketInfo.getKey())
+                  .setBucketName(bucketInfo.getValue()).setKeyName(keyName)
+                  .build();
+          ksm.deleteKey(arg);
+          delCount--;
+        }
+      }
+    }
+
+    // Keys should disappear from the KSM namespace quickly,
+    // because the actual block deletion happens asynchronously.
+    GenericTestUtils.waitFor(() -> {
+      try {
+        int num = countKsmKeys(ksm);
+        return num == (numOfExistedKeys);
+      } catch (IOException e) {
+        return false;
+      }
+    }, 1000, 10000);
+
+    // It might take a while until all blocks are actually deleted; verify
+    // that all chunk files created earlier are removed from disk.
+    GenericTestUtils.waitFor(
+        () -> expectedChunkFiles.stream().noneMatch(File::exists),
+        1000, 60000);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeysRatis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeysRatis.java
new file mode 100644
index 0000000..802cc3d
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeysRatis.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.RatisTestHelper;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.Ignore;
+import org.junit.rules.Timeout;
+
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.PutHelper;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.getMultiPartKey;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestGetKeyInfo;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestPutAndDeleteKey;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestPutAndGetKey;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestPutAndGetKeyWithDnRestart;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestPutAndListKey;
+import static org.apache.hadoop.ozone.web.client
+    .TestKeys.runTestPutKey;
+
+/** The same as {@link TestKeys} except that this test is Ratis enabled. */
+public class TestKeysRatis {
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+  private static RatisTestHelper.RatisTestSuite suite;
+  private static MiniOzoneCluster ozoneCluster = null;
+  private static String path;
+  private static OzoneRestClient ozoneRestClient = null;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    suite = new RatisTestHelper.RatisTestSuite(TestKeysRatis.class);
+    path = suite.getConf().get(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT);
+    ozoneCluster = suite.getCluster();
+    ozoneCluster.waitForClusterToBeReady();
+    ozoneRestClient = suite.newOzoneRestClient();
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (suite != null) {
+      suite.close();
+    }
+  }
+
+  @Test
+  public void testPutKey() throws Exception {
+    runTestPutKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestPutKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  @Ignore("disabling for now, datanodes restart with ratis is buggy")
+  @Test
+  public void testPutAndGetKeyWithDnRestart() throws Exception {
+    runTestPutAndGetKeyWithDnRestart(
+        new PutHelper(ozoneRestClient, path), ozoneCluster);
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestPutAndGetKeyWithDnRestart(
+        new PutHelper(ozoneRestClient, path, getMultiPartKey(delimiter)),
+        ozoneCluster);
+  }
+
+  @Test
+  public void testPutAndGetKey() throws Exception {
+    runTestPutAndGetKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestPutAndGetKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  @Test
+  public void testPutAndDeleteKey() throws Exception  {
+    runTestPutAndDeleteKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestPutAndDeleteKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  @Test
+  public void testPutAndListKey() throws Exception {
+    runTestPutAndListKey(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestPutAndListKey(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+
+  @Test
+  public void testGetKeyInfo() throws Exception {
+    runTestGetKeyInfo(new PutHelper(ozoneRestClient, path));
+    String delimiter = RandomStringUtils.randomAlphanumeric(1);
+    runTestGetKeyInfo(new PutHelper(ozoneRestClient, path,
+        getMultiPartKey(delimiter)));
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
new file mode 100644
index 0000000..627826e
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
@@ -0,0 +1,305 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.client;
+
+import io.netty.bootstrap.Bootstrap;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioSocketChannel;
+import io.netty.handler.codec.http.DefaultFullHttpRequest;
+import io.netty.handler.codec.http.FullHttpRequest;
+import io.netty.handler.codec.http.HttpClientCodec;
+import io.netty.handler.codec.http.HttpContent;
+import io.netty.handler.codec.http.HttpContentDecompressor;
+import io.netty.handler.codec.http.HttpMethod;
+import io.netty.handler.codec.http.HttpObject;
+import io.netty.handler.codec.http.HttpResponse;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import io.netty.handler.codec.http.HttpVersion;
+import io.netty.handler.codec.http.LastHttpContent;
+import io.netty.handler.logging.LogLevel;
+import io.netty.handler.logging.LoggingHandler;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Time;
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.util.EntityUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import javax.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Locale;
+import java.util.UUID;
+
+import static io.netty.util.CharsetUtil.UTF_8;
+
+/**
+ * Unit tests for Ozone client connection reuse with the Apache HttpClient and
+ * the Netty-based HTTP client.
+ */
+public class TestOzoneClient {
+  private static Logger log = Logger.getLogger(TestOzoneClient.class);
+  private static int testVolumeCount = 5;
+  private static MiniOzoneCluster cluster = null;
+  private static String endpoint = null;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.ALL);
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OzoneConfigKeys.OZONE_HANDLER_TYPE_KEY,
+        OzoneConsts.OZONE_HANDLER_DISTRIBUTED);
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    int port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+    endpoint = String.format("http://localhost:%d", port);
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test(timeout = 5000)
+  public void testNewConnectionPerRequest()
+      throws IOException, URISyntaxException {
+    for (int i = 0; i < testVolumeCount; i++) {
+      try (CloseableHttpClient httpClient =
+               HttpClients.createDefault()) {
+        createVolume(getRandomVolumeName(i), httpClient);
+      }
+    }
+  }
+
+  /**
+   * The object handler should be able to serve multiple requests from
+   * a single HTTP client. This allows the client side to reuse HTTP
+   * connections from a connection pool instead of creating a new
+   * connection per request, which is resource intensive.
+   *
+   */
+  @Test(timeout = 5000)
+  public void testReuseWithApacheHttpClient()
+      throws IOException, URISyntaxException {
+
+    PoolingHttpClientConnectionManager cm =
+        new PoolingHttpClientConnectionManager();
+    cm.setMaxTotal(200);
+    cm.setDefaultMaxPerRoute(20);
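+    // The pool sizing above (200 connections total, 20 per route) is only
+    // this test's choice; any pool that permits reuse exercises the same
+    // connection-reuse path.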
+
+    try (CloseableHttpClient httpClient =
+             HttpClients.custom().setConnectionManager(cm).build()) {
+      for (int i = 0; i < testVolumeCount; i++) {
+        createVolume(getRandomVolumeName(i), httpClient);
+      }
+    }
+  }
+
+  @Test(timeout = 10000)
+  public void testReuseWithNettyHttpClient()
+      throws IOException, InterruptedException, URISyntaxException {
+    URI uri = new URI(endpoint);
+    String host = uri.getHost() == null ? "127.0.0.1" : uri.getHost();
+    int port = uri.getPort();
+
+    EventLoopGroup workerGroup = new NioEventLoopGroup();
+    try {
+      Bootstrap b = new Bootstrap();
+      b.group(workerGroup)
+          .channel(NioSocketChannel.class)
+          .option(ChannelOption.SO_KEEPALIVE, true)
+          .option(ChannelOption.SO_REUSEADDR, true)
+          .handler(new ChannelInitializer<SocketChannel>() {
+            /**
+             * This method will be called once the {@link Channel} was
+             * registered. After the method returns this instance
+             * will be removed from the {@link ChannelPipeline}
+             * of the {@link Channel}.
+             *
+             * @param ch the {@link Channel} which was registered.
+             * @throws Exception is thrown if an error occurs.
+             * In that case the {@link Channel} will be closed.
+             */
+            @Override
+            public void initChannel(SocketChannel ch) {
+              ChannelPipeline p = ch.pipeline();
+
+              // Comment out the following line to disable client-side HTTP tracing.
+              p.addLast("log", new LoggingHandler(LogLevel.INFO));
+              p.addLast(new HttpClientCodec());
+              p.addLast(new HttpContentDecompressor());
+              p.addLast(new NettyHttpClientHandler());
+            }
+          });
+
+      Channel ch = b.connect(host, port).sync().channel();
+      for (int i = 0; i < testVolumeCount; i++) {
+        String volumeName = getRandomVolumeName(i);
+        try {
+          sendNettyCreateVolumeRequest(ch, volumeName);
+          Thread.sleep(1000);
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+      }
+
+      Thread.sleep(1000);
+      ch.close();
+      // Wait for the server to close the connection.
+      ch.closeFuture().sync();
+    } catch (Exception ex) {
+      log.error("Error received in client setup", ex);
+    } finally {
+      workerGroup.shutdownGracefully();
+    }
+  }
+
+  class NettyHttpClientHandler extends
+      SimpleChannelInboundHandler<HttpObject> {
+
+    @Override
+    public void channelRead0(ChannelHandlerContext ctx, HttpObject msg) {
+      if (msg instanceof HttpResponse) {
+        HttpResponse response = (HttpResponse) msg;
+        log.info("STATUS: " + response.getStatus());
+        log.info("VERSION: " + response.getProtocolVersion());
+        Assert.assertEquals(HttpResponseStatus.CREATED.code(),
+            response.getStatus().code());
+      }
+      if (msg instanceof HttpContent) {
+        HttpContent content = (HttpContent) msg;
+        log.info(content.content().toString(UTF_8));
+        if (content instanceof LastHttpContent) {
+          log.info("END OF CONTENT");
+        }
+      }
+    }
+
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+      log.error("Exception upon channel read", cause);
+      ctx.close();
+    }
+  }
+
+  private String getRandomVolumeName(int index) {
+    UUID id = UUID.randomUUID();
+    return "test-volume-" + index + "-" + id;
+
+  }
+
+  // Prepare the HTTP request and send it over the netty channel.
+  private void sendNettyCreateVolumeRequest(Channel channel, String volumeName)
+      throws URISyntaxException, IOException {
+    URIBuilder builder = new URIBuilder(endpoint);
+    builder.setPath("/" + volumeName);
+    URI uri = builder.build();
+
+    String host = uri.getHost() == null ? "127.0.0.1" : uri.getHost();
+    FullHttpRequest request = new DefaultFullHttpRequest(
+        HttpVersion.HTTP_1_1, HttpMethod.POST, uri.getRawPath());
+
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    request.headers().set(HttpHeaders.HOST, host);
+    request.headers().add(HttpHeaders.CONTENT_TYPE, "application/json");
+    request.headers().set(Header.OZONE_VERSION_HEADER,
+        Header.OZONE_V1_VERSION_HEADER);
+    request.headers().set(HttpHeaders.DATE,
+        format.format(new Date(Time.monotonicNow())));
+    request.headers().set(Header.OZONE_USER,
+        UserGroupInformation.getCurrentUser().getUserName());
+    request.headers().set(HttpHeaders.AUTHORIZATION,
+        Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " "
+            + OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+    // Send the HTTP request via netty channel.
+    channel.writeAndFlush(request);
+  }
+
+  // It is caller's responsibility to close the client.
+  private void createVolume(String volumeName, CloseableHttpClient httpClient)
+      throws IOException, URISyntaxException {
+    HttpPost create1 =
+        getCreateVolumeRequest(volumeName);
+    HttpEntity entity = null;
+    try {
+      CloseableHttpResponse response1 =
+          httpClient.execute(create1);
+      Assert.assertEquals(HttpURLConnection.HTTP_CREATED,
+          response1.getStatusLine().getStatusCode());
+      entity = response1.getEntity();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } finally {
+      EntityUtils.consumeQuietly(entity);
+    }
+  }
+
+  private HttpPost getCreateVolumeRequest(String volumeName)
+      throws URISyntaxException, IOException {
+    URIBuilder builder = new URIBuilder(endpoint);
+    builder.setPath("/" + volumeName);
+    HttpPost httpPost = new HttpPost(builder.build().toString());
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    httpPost.addHeader(Header.OZONE_VERSION_HEADER,
+        Header.OZONE_V1_VERSION_HEADER);
+    httpPost.addHeader(HttpHeaders.DATE,
+        format.format(new Date(Time.monotonicNow())));
+    httpPost.addHeader(Header.OZONE_USER,
+        UserGroupInformation.getCurrentUser().getUserName());
+    httpPost.addHeader(HttpHeaders.AUTHORIZATION,
+        Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME + " "
+            + OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    return httpPost;
+  }
+
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
new file mode 100644
index 0000000..4cd90c9
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
@@ -0,0 +1,433 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.Status;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.mockito.Mockito;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.junit.Ignore;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Test Ozone Volumes Lifecycle.
+ */
+public class TestVolume {
+  private static MiniOzoneCluster cluster = null;
+  private static OzoneRestClient ozoneRestClient = null;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "local", which uses a local directory to
+   * emulate the Ozone backend.
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    String path = GenericTestUtils
+        .getTempPath(TestVolume.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+    FileUtils.deleteDirectory(new File(path));
+
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.DEBUG);
+
+    cluster = MiniOzoneCluster.newBuilder(conf).build();
+    cluster.waitForClusterToBeReady();
+    final int port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+
+    ozoneRestClient = new OzoneRestClient(
+        String.format("http://localhost:%d", port));
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testCreateVolume() throws Exception {
+    runTestCreateVolume(ozoneRestClient);
+  }
+
+  static void runTestCreateVolume(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+
+    long currentTime = Time.now();
+    OzoneRestClient mockClient = Mockito.spy(client);
+    List<CloseableHttpClient> mockedClients = mockHttpClients(mockClient);
+    OzoneVolume vol = mockClient.createVolume(volumeName, "bilbo", "100TB");
+    // Verify http clients are properly closed.
+    verifyHttpConnectionClosed(mockedClients);
+
+    assertEquals(vol.getVolumeName(), volumeName);
+    assertEquals(vol.getCreatedby(), "hdfs");
+    assertEquals(vol.getOwnerName(), "bilbo");
+    assertEquals(vol.getQuota().getUnit(), OzoneQuota.Units.TB);
+    assertEquals(vol.getQuota().getSize(), 100);
+
+    // verify the volume creation time
+    assertTrue((OzoneUtils.formatDate(vol.getCreatedOn())
+        / 1000) >= (currentTime / 1000));
+
+    // Test creating a volume with an invalid volume name; we do not use a
+    // Rule here because the test method is static.
+    try {
+      String invalidVolumeName = "#" + OzoneUtils.getRequestID().toLowerCase();
+      client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      mockClient.createVolume(invalidVolumeName, "bilbo", "100TB");
+      fail("Except the volume creation be failed because the"
+          + " volume name starts with an invalid char #");
+    } catch (Exception e) {
+      assertTrue(e instanceof OzoneRestClientException);
+      assertTrue(e.getMessage().contains("Bucket or Volume name"
+          + " has an unsupported character : #"));
+    }
+  }
+
+  @Test
+  public void testCreateDuplicateVolume() throws OzoneException {
+    runTestCreateDuplicateVolume(ozoneRestClient);
+  }
+
+  static void runTestCreateDuplicateVolume(OzoneRestClient client)
+      throws OzoneException {
+    try {
+      client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+      client.createVolume("testvol", "bilbo", "100TB");
+      client.createVolume("testvol", "bilbo", "100TB");
+      fail("Creating a volume with a duplicate name should fail.");
+    } catch (OzoneException ex) {
+      // Ozone will throw saying volume already exists
+      GenericTestUtils.assertExceptionContains(
+          Status.VOLUME_ALREADY_EXISTS.toString(), ex);
+    }
+  }
+
+  @Test
+  public void testDeleteVolume() throws OzoneException {
+    runTestDeleteVolume(ozoneRestClient);
+  }
+
+  static void runTestDeleteVolume(OzoneRestClient client)
+      throws OzoneException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    client.deleteVolume(vol.getVolumeName());
+  }
+
+  @Test
+  public void testChangeOwnerOnVolume() throws Exception {
+    runTestChangeOwnerOnVolume(ozoneRestClient);
+  }
+
+  static void runTestChangeOwnerOnVolume(OzoneRestClient client)
+      throws OzoneException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    client.setVolumeOwner(volumeName, "frodo");
+    OzoneVolume newVol = client.getVolume(volumeName);
+    assertEquals(newVol.getOwnerName(), "frodo");
+    // verify that the creation time is preserved after the set-owner operation
+    assertTrue(OzoneUtils.formatDate(newVol.getCreatedOn()) > 0);
+  }
+
+  @Test
+  public void testChangeQuotaOnVolume() throws Exception {
+    runTestChangeQuotaOnVolume(ozoneRestClient);
+  }
+
+  static void runTestChangeQuotaOnVolume(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    String volumeName = OzoneUtils.getRequestID().toLowerCase();
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    OzoneVolume vol = client.createVolume(volumeName, "bilbo", "100TB");
+    client.setVolumeQuota(volumeName, "1000MB");
+    OzoneVolume newVol = client.getVolume(volumeName);
+    assertEquals(newVol.getQuota().getSize(), 1000);
+    assertEquals(newVol.getQuota().getUnit(), OzoneQuota.Units.MB);
+    // verify that the creation time is preserved after the set-quota operation
+    assertTrue(OzoneUtils.formatDate(newVol.getCreatedOn()) > 0);
+  }
+
+  @Test
+  public void testListVolume() throws OzoneException, IOException {
+    runTestListVolume(ozoneRestClient);
+  }
+
+  static void runTestListVolume(OzoneRestClient client)
+      throws OzoneException, IOException {
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    for (int x = 0; x < 10; x++) {
+      String volumeName = OzoneUtils.getRequestID().toLowerCase();
+      OzoneVolume vol = client.createVolume(volumeName, "frodo", "100TB");
+      assertNotNull(vol);
+    }
+
+    List<OzoneVolume> ovols = client.listVolumes("frodo");
+    assertTrue(ovols.size() >= 10);
+  }
+
+  // TODO: remove @Ignore below once the problem has been resolved.
+  @Ignore("Takes 3m to run, disable for now.")
+  @Test
+  public void testListVolumePagination() throws OzoneException, IOException {
+    runTestListVolumePagination(ozoneRestClient);
+  }
+
+  static void runTestListVolumePagination(OzoneRestClient client)
+      throws OzoneException, IOException {
+    final int volCount = 2000;
+    final int step = 100;
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    for (int x = 0; x < volCount; x++) {
+      String volumeName = OzoneUtils.getRequestID().toLowerCase();
+      OzoneVolume vol = client.createVolume(volumeName, "frodo", "100TB");
+      assertNotNull(vol);
+    }
+    OzoneVolume prevKey = null;
+    int count = 0;
+    int pagecount = 0;
+    while (count < volCount) {
+      List<OzoneVolume> ovols = client.listVolumes("frodo", null, step,
+          prevKey);
+      count += ovols.size();
+      prevKey = ovols.get(ovols.size() - 1);
+      pagecount++;
+    }
+    assertEquals(volCount / step, pagecount);
+  }
+
+  // TODO: remove @Ignore below once the problem has been resolved.
+  @Ignore
+  @Test
+  public void testListAllVolumes() throws OzoneException, IOException {
+    runTestListAllVolumes(ozoneRestClient);
+  }
+
+  static void runTestListAllVolumes(OzoneRestClient client)
+      throws OzoneException, IOException {
+    final int volCount = 200;
+    final int step = 10;
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    for (int x = 0; x < volCount; x++) {
+      String userName =
+          "frodo" + RandomStringUtils.randomAlphabetic(5).toLowerCase();
+      String volumeName =
+          "vol" + RandomStringUtils.randomAlphabetic(5).toLowerCase();
+      OzoneVolume vol = client.createVolume(volumeName, userName, "100TB");
+      assertNotNull(vol);
+    }
+    OzoneVolume prevKey = null;
+    int count = 0;
+    int pagecount = 0;
+    while (count < volCount) {
+      List<OzoneVolume> ovols = client.listAllVolumes(null, step, prevKey);
+      count += ovols.size();
+      if (ovols.size() > 0) {
+        prevKey = ovols.get(ovols.size() - 1);
+      }
+      pagecount++;
+    }
+    // Because we are querying an existing ozone store, there will
+    // be volumes created by other tests too, so we may get more page counts.
+    assertEquals(volCount / step, pagecount);
+  }
+
+  @Test
+  public void testListVolumes() throws Exception {
+    runTestListVolumes(ozoneRestClient);
+  }
+
+  static void runTestListVolumes(OzoneRestClient client)
+      throws OzoneException, IOException, ParseException {
+    final int volCount = 20;
+    final String user1 = "test-user-a";
+    final String user2 = "test-user-b";
+
+    long currentTime = Time.now();
+    client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);
+    // Create 20 volumes, 10 for user1 and another 10 for user2.
+    for (int x = 0; x < volCount; x++) {
+      String volumeName;
+      String userName;
+
+      if (x % 2 == 0) {
+        // create volume [test-vol0, test-vol2, ..., test-vol18] for user1
+        userName = user1;
+        volumeName = "test-vol" + x;
+      } else {
+        // create volume [test-vol1, test-vol3, ..., test-vol19] for user2
+        userName = user2;
+        volumeName = "test-vol" + x;
+      }
+      OzoneVolume vol = client.createVolume(volumeName, userName, "100TB");
+      assertNotNull(vol);
+    }
+
+    // list all the volumes belonging to user1
+    List<OzoneVolume> volumeList = client.listVolumes(user1,
+        null, 100, StringUtils.EMPTY);
+    assertEquals(10, volumeList.size());
+    // verify the owner name and creation time of volume
+    for (OzoneVolume vol : volumeList) {
+      assertTrue(vol.getOwnerName().equals(user1));
+      assertTrue((OzoneUtils.formatDate(vol.getCreatedOn())
+          / 1000) >= (currentTime / 1000));
+    }
+
+    // test max key parameter of listing volumes
+    volumeList = client.listVolumes(user1, null, 2, StringUtils.EMPTY);
+    assertEquals(2, volumeList.size());
+
+    // test prefix parameter of listing volumes
+    volumeList = client.listVolumes(user1, "test-vol10", 100,
+        StringUtils.EMPTY);
+    assertTrue(volumeList.size() == 1
+        && volumeList.get(0).getVolumeName().equals("test-vol10"));
+
+    volumeList = client.listVolumes(user1, "test-vol1",
+        100, StringUtils.EMPTY);
+    assertEquals(5, volumeList.size());
+
+    // test start key parameter of listing volumes
+    volumeList = client.listVolumes(user2, null, 100, "test-vol15");
+    assertEquals(2, volumeList.size());
+  }
+
+  /**
+   * Returns a list of mocked {@link CloseableHttpClient} instances used for
+   * testing. The mocked clients replace the ones that
+   * {@link OzoneRestClient#newHttpClient()} would otherwise create, so that
+   * the test can verify how they are invoked. <b>Note</b>: the output
+   * of this method is always used as the input of
+   * {@link TestVolume#verifyHttpConnectionClosed(List)}.
+   *
+   * @param mockedClient mocked ozone client.
+   * @return a list of mocked {@link CloseableHttpClient}.
+   * @throws IOException
+   */
+  private static List<CloseableHttpClient> mockHttpClients(
+      OzoneRestClient mockedClient)
+      throws IOException {
+    List<CloseableHttpClient> spyHttpClients = new ArrayList<>();
+    for (int i = 0; i < 5; i++) {
+      CloseableHttpClient spyHttpClient = Mockito
+          .spy(HddsClientUtils.newHttpClient());
+      spyHttpClients.add(spyHttpClient);
+    }
+
+    List<CloseableHttpClient> nextReturns =
+        new ArrayList<>(spyHttpClients.subList(1, spyHttpClients.size()));
+    Mockito.when(mockedClient.newHttpClient()).thenReturn(
+        spyHttpClients.get(0),
+        nextReturns.toArray(new CloseableHttpClient[nextReturns.size()]));
+    return spyHttpClients;
+  }
+
+  /**
+   * This method is used together with
+   * {@link TestVolume#mockHttpClients(OzoneRestClient)} to verify that the
+   * http clients are properly closed. It verifies that whenever a client
+   * calls {@link CloseableHttpClient#execute(HttpUriRequest)} to send a
+   * request, it also calls {@link CloseableHttpClient#close()} to close the
+   * http connection.
+   *
+   * @param mockedHttpClients the mocked http clients to verify.
+   */
+  private static void verifyHttpConnectionClosed(
+      List<CloseableHttpClient> mockedHttpClients) {
+    final AtomicInteger totalCalled = new AtomicInteger();
+    assertTrue(mockedHttpClients.stream().allMatch(closeableHttpClient -> {
+      boolean clientUsed = false;
+      try {
+        verify(closeableHttpClient, times(1)).execute(Mockito.any());
+        totalCalled.incrementAndGet();
+        clientUsed = true;
+      } catch (Throwable e) {
+        // There might be some redundant instances in mockedHttpClients;
+        // it is allowed that a client is not used.
+        return true;
+      }
+
+      if (clientUsed) {
+        try {
+          // If a client is used, ensure the close function is called.
+          verify(closeableHttpClient, times(1)).close();
+          return true;
+        } catch (IOException e) {
+          return false;
+        }
+      } else {
+        return true;
+      }
+    }));
+    System.out.println("Successful connections " + totalCalled.get());
+    assertTrue("The mocked http client should be called at least once.",
+        totalCalled.get() > 0);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolumeRatis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolumeRatis.java
new file mode 100644
index 0000000..8314851
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolumeRatis.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.client;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.*;
+import org.junit.rules.Timeout;
+
+import java.io.File;
+import java.io.IOException;
+
+/** The same as {@link TestVolume} except that this test is Ratis enabled. */
+@Ignore("Disabling Ratis tests for pipeline work.")
+public class TestVolumeRatis {
+  @Rule
+  public Timeout testTimeout = new Timeout(300000);
+  private static OzoneRestClient ozoneClient;
+  private static MiniOzoneCluster cluster;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+
+    // This enables Ratis in the cluster.
+    conf.setBoolean(OzoneConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY, true);
+
+
+    String path = GenericTestUtils
+        .getTempPath(TestVolume.class.getSimpleName());
+    path += conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+        OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+    FileUtils.deleteDirectory(new File(path));
+
+    conf.set(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT, path);
+    Logger.getLogger("log4j.logger.org.apache.http").setLevel(Level.DEBUG);
+
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(3).build();
+    cluster.waitForClusterToBeReady();
+    final int port = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails().getOzoneRestPort();
+
+    ozoneClient = new OzoneRestClient(
+        String.format("http://localhost:%d", port));
+  }
+
+  @AfterClass
+  public static void shutdown() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+
+  }
+
+  @Test
+  public void testCreateVolume() throws Exception {
+    TestVolume.runTestCreateVolume(ozoneClient);
+  }
+
+  @Test
+  public void testCreateDuplicateVolume() throws OzoneException {
+    TestVolume.runTestCreateDuplicateVolume(ozoneClient);
+  }
+
+  @Test
+  public void testDeleteVolume() throws OzoneException {
+    TestVolume.runTestDeleteVolume(ozoneClient);
+  }
+
+  @Test
+  public void testChangeOwnerOnVolume() throws Exception {
+    TestVolume.runTestChangeOwnerOnVolume(ozoneClient);
+  }
+
+  @Test
+  public void testChangeQuotaOnVolume() throws Exception {
+    TestVolume.runTestChangeQuotaOnVolume(ozoneClient);
+  }
+
+  // TODO: remove @Ignore below once the problem has been resolved.
+  @Ignore("listVolumes not implemented in DistributedStorageHandler")
+  @Test
+  public void testListVolume() throws OzoneException, IOException {
+    TestVolume.runTestListVolume(ozoneClient);
+  }
+
+  // TODO: remove @Ignore below once the problem has been resolved.
+  @Ignore("See TestVolume.testListVolumePagination()")
+  @Test
+  public void testListVolumePagination() throws OzoneException, IOException {
+    TestVolume.runTestListVolumePagination(ozoneClient);
+  }
+
+  // TODO: remove @Ignore below once the problem has been resolved.
+  @Ignore("See TestVolume.testListAllVolumes()")
+  @Test
+  public void testListAllVolumes() throws Exception {
+    TestVolume.runTestListAllVolumes(ozoneClient);
+  }
+
+  @Ignore("Disabling Ratis tests for pipeline work.")
+  @Test
+  public void testListVolumes() throws Exception {
+    TestVolume.runTestListVolumes(ozoneClient);
+  }
+}
diff --git a/hadoop-ozone/integration-test/src/test/resources/log4j.properties b/hadoop-ozone/integration-test/src/test/resources/log4j.properties
new file mode 100644
index 0000000..cad9dd1
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/resources/log4j.properties
@@ -0,0 +1,18 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=info,stdout
+log4j.threshold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
\ No newline at end of file
diff --git a/hadoop-ozone/integration-test/src/test/resources/webapps/ksm/.gitkeep b/hadoop-ozone/integration-test/src/test/resources/webapps/ksm/.gitkeep
new file mode 100644
index 0000000..09697dc
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/resources/webapps/ksm/.gitkeep
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/hadoop-ozone/integration-test/src/test/resources/webapps/scm/.gitkeep b/hadoop-ozone/integration-test/src/test/resources/webapps/scm/.gitkeep
new file mode 100644
index 0000000..09697dc
--- /dev/null
+++ b/hadoop-ozone/integration-test/src/test/resources/webapps/scm/.gitkeep
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/hadoop-ozone/objectstore-service/pom.xml b/hadoop-ozone/objectstore-service/pom.xml
new file mode 100644
index 0000000..43da657
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/pom.xml
@@ -0,0 +1,69 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-objectstore-service</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Object Store REST Service</description>
+  <name>Apache Hadoop Ozone Object Store REST Service</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>ozone</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-annotations</artifactId>
+      <version>1.5.9</version>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>2.2.0</version>
+      <scope>test</scope>
+    </dependency>
+
+  </dependencies>
+  <build>
+    <plugins>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/ObjectStoreHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/ObjectStoreHandler.java
new file mode 100644
index 0000000..3128d31
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/ObjectStoreHandler.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode;
+
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForBlockClients;
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
+import static org.apache.hadoop.ozone.KsmUtils.getKsmAddress;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.*;
+import static com.sun.jersey.api.core.ResourceConfig.PROPERTY_CONTAINER_REQUEST_FILTERS;
+import static com.sun.jersey.api.core.ResourceConfig.FEATURE_TRACE;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.sun.jersey.api.container.ContainerFactory;
+import com.sun.jersey.api.core.ApplicationAdapter;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.ksm.protocolPB
+    .KeySpaceManagerProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolPB;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.web.ObjectStoreApplication;
+import org.apache.hadoop.ozone.web.handlers.ServiceFilter;
+import org.apache.hadoop.ozone.web.netty.ObjectStoreJerseyContainer;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .ScmBlockLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.storage.DistributedStorageHandler;
+import org.apache.hadoop.ozone.web.localstorage.LocalStorageHandler;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * Implements object store handling within the DataNode process.  This class is
+ * responsible for initializing and maintaining the RPC clients and servers and
+ * the web application required for the object store implementation.
+ */
+public final class ObjectStoreHandler implements Closeable {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ObjectStoreHandler.class);
+
+  private final ObjectStoreJerseyContainer objectStoreJerseyContainer;
+  private final KeySpaceManagerProtocolClientSideTranslatorPB
+      keySpaceManagerClient;
+  private final StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private final ScmBlockLocationProtocolClientSideTranslatorPB
+      scmBlockLocationClient;
+  private final StorageHandler storageHandler;
+
+  /**
+   * Creates a new ObjectStoreHandler.
+   *
+   * @param conf configuration
+   * @throws IOException if there is an I/O error
+   */
+  public ObjectStoreHandler(Configuration conf) throws IOException {
+    String shType = conf.getTrimmed(OZONE_HANDLER_TYPE_KEY,
+        OZONE_HANDLER_TYPE_DEFAULT);
+    LOG.info("ObjectStoreHandler initializing with {}: {}",
+        OZONE_HANDLER_TYPE_KEY, shType);
+    boolean ozoneTrace = conf.getBoolean(OZONE_TRACE_ENABLED_KEY,
+        OZONE_TRACE_ENABLED_DEFAULT);
+
+    // Initialize Jersey container for object store web application.
+    if (OzoneConsts.OZONE_HANDLER_DISTRIBUTED.equalsIgnoreCase(shType)) {
+      RPC.setProtocolEngine(conf, StorageContainerLocationProtocolPB.class,
+          ProtobufRpcEngine.class);
+      long scmVersion =
+          RPC.getProtocolVersion(StorageContainerLocationProtocolPB.class);
+
+      InetSocketAddress scmAddress =
+          getScmAddressForClients(conf);
+      this.storageContainerLocationClient =
+          new StorageContainerLocationProtocolClientSideTranslatorPB(
+              RPC.getProxy(StorageContainerLocationProtocolPB.class, scmVersion,
+              scmAddress, UserGroupInformation.getCurrentUser(), conf,
+              NetUtils.getDefaultSocketFactory(conf),
+              Client.getRpcTimeout(conf)));
+
+      InetSocketAddress scmBlockAddress =
+          getScmAddressForBlockClients(conf);
+      this.scmBlockLocationClient =
+          new ScmBlockLocationProtocolClientSideTranslatorPB(
+              RPC.getProxy(ScmBlockLocationProtocolPB.class, scmVersion,
+                  scmBlockAddress, UserGroupInformation.getCurrentUser(), conf,
+                  NetUtils.getDefaultSocketFactory(conf),
+                  Client.getRpcTimeout(conf)));
+
+      RPC.setProtocolEngine(conf, KeySpaceManagerProtocolPB.class,
+          ProtobufRpcEngine.class);
+      long ksmVersion =
+          RPC.getProtocolVersion(KeySpaceManagerProtocolPB.class);
+      InetSocketAddress ksmAddress = getKsmAddress(conf);
+      this.keySpaceManagerClient =
+          new KeySpaceManagerProtocolClientSideTranslatorPB(
+              RPC.getProxy(KeySpaceManagerProtocolPB.class, ksmVersion,
+              ksmAddress, UserGroupInformation.getCurrentUser(), conf,
+              NetUtils.getDefaultSocketFactory(conf),
+              Client.getRpcTimeout(conf)));
+
+      storageHandler = new DistributedStorageHandler(
+          new OzoneConfiguration(conf),
+          this.storageContainerLocationClient,
+          this.keySpaceManagerClient);
+    } else {
+      if (OzoneConsts.OZONE_HANDLER_LOCAL.equalsIgnoreCase(shType)) {
+        storageHandler = new LocalStorageHandler(conf);
+        this.storageContainerLocationClient = null;
+        this.scmBlockLocationClient = null;
+        this.keySpaceManagerClient = null;
+      } else {
+        throw new IllegalArgumentException(
+            String.format("Unrecognized value for %s: %s,"
+                + " Allowed values are %s,%s",
+                OZONE_HANDLER_TYPE_KEY, shType,
+                OzoneConsts.OZONE_HANDLER_DISTRIBUTED,
+                OzoneConsts.OZONE_HANDLER_LOCAL));
+      }
+    }
+    ApplicationAdapter aa =
+        new ApplicationAdapter(new ObjectStoreApplication());
+    Map<String, Object> settingsMap = new HashMap<>();
+    settingsMap.put(PROPERTY_CONTAINER_REQUEST_FILTERS,
+        ServiceFilter.class.getCanonicalName());
+    settingsMap.put(FEATURE_TRACE, ozoneTrace);
+    aa.setPropertiesAndFeatures(settingsMap);
+    this.objectStoreJerseyContainer = ContainerFactory.createContainer(
+        ObjectStoreJerseyContainer.class, aa);
+    this.objectStoreJerseyContainer.setStorageHandler(storageHandler);
+  }
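+
+  // A minimal usage sketch (illustrative only, not exercised by this change):
+  // callers select the storage handler via OZONE_HANDLER_TYPE_KEY before
+  // constructing this class and close it when done, since it implements
+  // Closeable.
+  //
+  //   OzoneConfiguration conf = new OzoneConfiguration();
+  //   conf.set(OZONE_HANDLER_TYPE_KEY, OzoneConsts.OZONE_HANDLER_LOCAL);
+  //   try (ObjectStoreHandler handler = new ObjectStoreHandler(conf)) {
+  //     StorageHandler storage = handler.getStorageHandler();
+  //     // ... use storage, or handler.getObjectStoreJerseyContainer() ...
+  //   }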
+
+  /**
+   * Returns the initialized web application container.
+   *
+   * @return initialized web application container
+   */
+  public ObjectStoreJerseyContainer getObjectStoreJerseyContainer() {
+    return this.objectStoreJerseyContainer;
+  }
+
+  /**
+   * Returns the storage handler.
+   *
+   * @return returns the storage handler
+   */
+  public StorageHandler getStorageHandler() {
+    return this.storageHandler;
+  }
+
+  @Override
+  public void close() {
+    LOG.info("Closing ObjectStoreHandler.");
+    storageHandler.close();
+    IOUtils.cleanupWithLogger(LOG, storageContainerLocationClient);
+    IOUtils.cleanupWithLogger(LOG, scmBlockLocationClient);
+    IOUtils.cleanupWithLogger(LOG, keySpaceManagerClient);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/package-info.java
new file mode 100644
index 0000000..e853f67
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/hdfs/server/datanode/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode;
+
+/**
+ * Object store related service inside the datanode.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/OzoneRestUtils.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/OzoneRestUtils.java
new file mode 100644
index 0000000..b13ae30
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/OzoneRestUtils.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+import java.util.UUID;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.util.Time;
+
+import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Set of Utility functions used in ozone.
+ */
+@InterfaceAudience.Private
+public final class OzoneRestUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      OzoneRestUtils.class);
+
+
+  private OzoneRestUtils() {
+    // Never constructed
+  }
+
+  /**
+   * Date format used in ozone. The format is wrapped in a ThreadLocal because
+   * SimpleDateFormat is not thread safe.
+   */
+  private static final ThreadLocal<SimpleDateFormat> DATE_FORMAT =
+      new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      SimpleDateFormat format = new SimpleDateFormat(
+          OzoneConsts.OZONE_DATE_FORMAT, Locale.US);
+      format.setTimeZone(TimeZone.getTimeZone(OzoneConsts.OZONE_TIME_ZONE));
+
+      return format;
+    }
+  };
+
+  /**
+   * Verifies that a bucket or volume name is a valid DNS name.
+   *
+   * @param resName Bucket or volume Name to be validated
+   *
+   * @throws IllegalArgumentException
+   */
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
+    HddsClientUtils.verifyResourceName(resName);
+  }
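+
+  // Illustrative examples only (behavior is delegated to HddsClientUtils and
+  // not asserted by this change): a lowercase DNS-style name such as
+  // "volume-01" is expected to pass, while a name containing an unsupported
+  // character such as '#' is rejected with an IllegalArgumentException, as
+  // exercised in TestVolume#runTestCreateVolume.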
+
+  /**
+   * Returns a random Request ID.
+   *
+   * The request ID is returned to the client and also flows through the
+   * system, making it easier to debug why a certain request failed.
+   *
+   * @return String random request ID
+   */
+  public static String getRequestID() {
+    return UUID.randomUUID().toString();
+  }
+
+
+
+  /**
+   * Basic validate routine to make sure that all the
+   * required headers are in place.
+   *
+   * @param request - http request
+   * @param headers - http headers
+   * @param reqId - request id
+   * @param resource - Resource Name
+   * @param hostname - Hostname
+   *
+   * @throws OzoneException
+   */
+  public static void validate(Request request, HttpHeaders headers,
+                              String reqId, String resource, String hostname)
+      throws OzoneException {
+
+    List<String> ozHeader =
+        headers.getRequestHeader(Header.OZONE_VERSION_HEADER);
+    if (ozHeader == null) {
+      throw ErrorTable
+          .newError(ErrorTable.MISSING_VERSION, reqId, resource, hostname);
+    }
+
+    List<String> date = headers.getRequestHeader(HttpHeaders.DATE);
+    if (date == null) {
+      throw ErrorTable
+          .newError(ErrorTable.MISSING_DATE, reqId, resource, hostname);
+    }
+
+    /*
+    TODO :
+    Ignore the result for the time being. Eventually we can validate whether
+    the request Date is too skewed and reject the request if it is.
+    */
+    parseDate(date.get(0), reqId, resource, hostname);
+
+  }
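+
+  // Illustrative request headers that would pass the checks above (the
+  // header values are assumptions based on the Header constants, shown here
+  // only as an example):
+  //
+  //   x-ozone-version: v1
+  //   Date: Mon, 26 Mar 2018 19:04:27 GMT
+  //
+  // The Date value must match OzoneConsts.OZONE_DATE_FORMAT so that
+  // parseDate below can parse it.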
+
+  /**
+   * Parses the Date String coming from the Users.
+   *
+   * @param dateString - Date String
+   * @param reqID - Ozone Request ID
+   * @param resource - Resource Name
+   * @param hostname - HostName
+   *
+   * @return - Date
+   *
+   * @throws OzoneException - in case of parsing error
+   */
+  public static synchronized Date parseDate(String dateString, String reqID,
+                                            String resource, String hostname)
+      throws OzoneException {
+    try {
+      return DATE_FORMAT.get().parse(dateString);
+    } catch (ParseException ex) {
+      OzoneException exp =
+          ErrorTable.newError(ErrorTable.BAD_DATE, reqID, resource, hostname);
+      exp.setMessage(ex.getMessage());
+      throw exp;
+    }
+  }
+
+  /**
+   * Returns a response with appropriate OZONE headers and payload.
+   *
+   * @param args - UserArgs or Inherited class
+   * @param statusCode - HttpStatus code
+   * @param payload - Content Body
+   *
+   * @return JAX-RS Response
+   */
+  public static Response getResponse(UserArgs args, int statusCode,
+                                     String payload) {
+    String date = DATE_FORMAT.get().format(new Date(Time.now()));
+    return Response.ok(payload)
+        .header(Header.OZONE_SERVER_NAME, args.getHostName())
+        .header(Header.OZONE_REQUEST_ID, args.getRequestID())
+        .header(HttpHeaders.DATE, date).status(statusCode).build();
+  }
+
+  /**
+   * Returns a response with appropriate OZONE headers and payload.
+   *
+   * @param args - UserArgs or Inherited class
+   * @param statusCode - HttpStatus code
+   * @param stream InputStream
+   *
+   * @return JAX-RS Response
+   */
+  public static Response getResponse(UserArgs args, int statusCode,
+                                     LengthInputStream stream) {
+    String date = DATE_FORMAT.get().format(new Date(Time.now()));
+    return Response.ok(stream, MediaType.APPLICATION_OCTET_STREAM)
+        .header(Header.OZONE_SERVER_NAME, args.getHostName())
+        .header(Header.OZONE_REQUEST_ID, args.getRequestID())
+        .header(HttpHeaders.DATE, date).status(statusCode)
+        .header(HttpHeaders.CONTENT_LENGTH, stream.getLength())
+        .build();
+
+  }
+
+
+
+  /**
+   * Convert a time in milliseconds to the human readable format used in ozone.
+   * @param millis time in milliseconds
+   * @return a human readable string for the input time
+   */
+  public static String formatTime(long millis) {
+    return DATE_FORMAT.get().format(millis);
+  }
+
+  /**
+   * Convert a time in the ozone date format to milliseconds.
+   * @param date date string in the ozone date format
+   * @return time in milliseconds
+   * @throws ParseException if the date string cannot be parsed
+   */
+  public static long formatDate(String date) throws ParseException {
+    Preconditions.checkNotNull(date, "Date string should not be null.");
+    return DATE_FORMAT.get().parse(date).getTime();
+  }
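+
+  // A small round-trip sketch (illustrative only): formatTime and formatDate
+  // are inverses up to the resolution of OzoneConsts.OZONE_DATE_FORMAT.
+  //
+  //   long now = Time.now();
+  //   String text = OzoneRestUtils.formatTime(now);
+  //   long parsed = OzoneRestUtils.formatDate(text);  // may throw ParseException
+  //   // parsed equals now truncated to the format's resolution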
+
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/package-info.java
new file mode 100644
index 0000000..973c8f3
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+/**
+ * Ozone related generic classes.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreApplication.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreApplication.java
new file mode 100644
index 0000000..7dd9a33
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreApplication.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.client.rest.OzoneExceptionMapper;
+import org.apache.hadoop.ozone.web.handlers.BucketHandler;
+import org.apache.hadoop.ozone.web.handlers.KeyHandler;
+import org.apache.hadoop.ozone.web.handlers.ServiceFilter;
+import org.apache.hadoop.ozone.web.handlers.VolumeHandler;
+import org.apache.hadoop.ozone.web.messages.LengthInputStreamMessageBodyWriter;
+import org.apache.hadoop.ozone.web.messages.StringMessageBodyWriter;
+
+import javax.ws.rs.core.Application;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Ozone Application.
+ */
+public class ObjectStoreApplication extends Application {
+  public ObjectStoreApplication() {
+    super();
+  }
+
+  @Override
+  public Set<Class<?>> getClasses() {
+    HashSet<Class<?>> set = new HashSet<>();
+    set.add(BucketHandler.class);
+    set.add(VolumeHandler.class);
+    set.add(KeyHandler.class);
+    set.add(OzoneExceptionMapper.class);
+    set.add(LengthInputStreamMessageBodyWriter.class);
+    set.add(StringMessageBodyWriter.class);
+    return set;
+  }
+
+  @Override
+  public Set<Object> getSingletons() {
+    HashSet<Object> set = new HashSet<>();
+    set.add(ServiceFilter.class);
+    return set;
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/OzoneHddsDatanodeService.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/OzoneHddsDatanodeService.java
new file mode 100644
index 0000000..2283ba6
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/OzoneHddsDatanodeService.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.ozone.HddsDatanodeService;
+import org.apache.hadoop.ozone.web.netty.ObjectStoreRestHttpServer;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.util.ServicePlugin;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * DataNode service plugin implementation that starts the ObjectStore REST
+ * server.
+ */
+public class OzoneHddsDatanodeService implements ServicePlugin {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OzoneHddsDatanodeService.class);
+
+  private Configuration conf;
+  private ObjectStoreHandler handler;
+  private ObjectStoreRestHttpServer objectStoreRestHttpServer;
+
+  @Override
+  public void start(Object service) {
+    if (service instanceof HddsDatanodeService) {
+      try {
+        HddsDatanodeService hddsDatanodeService = (HddsDatanodeService) service;
+        conf = hddsDatanodeService.getConf();
+        handler = new ObjectStoreHandler(conf);
+        objectStoreRestHttpServer = new ObjectStoreRestHttpServer(
+            conf, null, handler);
+        objectStoreRestHttpServer.start();
+        hddsDatanodeService.getDatanodeDetails().setOzoneRestPort(
+            objectStoreRestHttpServer.getHttpAddress().getPort());
+
+      } catch (IOException e) {
+        throw new RuntimeException("Can't start the Object Store Rest server",
+            e);
+      }
+    } else {
+      LOG.error("Not starting {}, as the plugin is not invoked through {}",
+          OzoneHddsDatanodeService.class.getSimpleName(),
+          HddsDatanodeService.class.getSimpleName());
+    }
+  }
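+
+  // A configuration sketch (illustrative; the plugin key name is an
+  // assumption, not introduced by this change): the plugin is expected to be
+  // loaded by HddsDatanodeService through its service-plugin configuration,
+  // for example:
+  //
+  //   Configuration conf = new OzoneConfiguration();
+  //   conf.set("hdds.datanode.plugins",
+  //       OzoneHddsDatanodeService.class.getName());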
+
+
+  @Override
+  public void stop() {
+    try {
+      handler.close();
+    } catch (Exception e) {
+      throw new RuntimeException("Can't stop the Object Store Rest server", e);
+    }
+  }
+
+  @Override
+  public void close() {
+    IOUtils.closeQuietly(objectStoreRestHttpServer);
+    IOUtils.closeQuietly(handler);
+  }
+
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/ErrorTable.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/ErrorTable.java
new file mode 100644
index 0000000..16892e7
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/ErrorTable.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+
+import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
+import static java.net.HttpURLConnection.HTTP_CONFLICT;
+import static java.net.HttpURLConnection.HTTP_FORBIDDEN;
+import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR;
+import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
+import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED;
+
+/**
+ * Error Table represents the Errors from Ozone Rest API layer.
+ *
+ * Please note: the errors in this table are sorted by HTTP error code. If you
+ * add new error codes to this table, please follow the same convention.
+ */
+@InterfaceAudience.Private
+public final class ErrorTable {
+
+  /* Error 400 */
+  public static final OzoneException MISSING_VERSION =
+      new OzoneException(HTTP_BAD_REQUEST, "missingVersion",
+                         "x-ozone-version header is required.");
+
+  public static final OzoneException MISSING_DATE =
+      new OzoneException(HTTP_BAD_REQUEST, "missingDate",
+                         "Date header is required.");
+
+  public static final OzoneException BAD_DATE =
+      new OzoneException(HTTP_BAD_REQUEST, "badDate",
+                         "Unable to parse date format.");
+
+  public static final OzoneException MALFORMED_QUOTA =
+      new OzoneException(HTTP_BAD_REQUEST, "malformedQuota",
+                         "Invalid quota specified.");
+
+  public static final OzoneException MALFORMED_ACL =
+      new OzoneException(HTTP_BAD_REQUEST, "malformedACL",
+                         "Invalid ACL specified.");
+
+
+  public static final OzoneException INVALID_VOLUME_NAME =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidVolumeName",
+                         "Invalid volume name.");
+
+  public static final OzoneException INVALID_QUERY_PARAM =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidQueryParam",
+                         "Invalid query parameter.");
+
+  public static final OzoneException INVALID_RESOURCE_NAME =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidResourceName",
+                         "Invalid volume, bucket or key name.");
+
+  public static final OzoneException INVALID_BUCKET_NAME =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidBucketName",
+                         "Invalid bucket name.");
+
+  public static final OzoneException INVALID_KEY =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidKey", "Invalid key.");
+
+  public static final OzoneException INVALID_REQUEST =
+      new OzoneException(HTTP_BAD_REQUEST, "invalidRequest",
+                         "Error in request.");
+
+  public static final OzoneException MALFORMED_BUCKET_VERSION =
+      new OzoneException(HTTP_BAD_REQUEST, "malformedBucketVersion",
+                         "Malformed bucket version or version not unique.");
+
+  public static final OzoneException MALFORMED_STORAGE_TYPE =
+      new OzoneException(HTTP_BAD_REQUEST, "malformedStorageType",
+                         "Invalid storage Type specified.");
+
+  public static final OzoneException MALFORMED_STORAGE_CLASS =
+      new OzoneException(HTTP_BAD_REQUEST, "malformedStorageClass",
+                         "Invalid storage class specified.");
+
+  public static final OzoneException BAD_DIGEST =
+      new OzoneException(HTTP_BAD_REQUEST, "badDigest",
+                         "Content MD5 does not match.");
+
+  public static final OzoneException INCOMPLETE_BODY =
+      new OzoneException(HTTP_BAD_REQUEST, "incompleteBody",
+                         "Content length does not match stream size.");
+
+  public static final OzoneException BAD_AUTHORIZATION =
+      new OzoneException(HTTP_BAD_REQUEST, "badAuthorization",
+                         "Missing authorization or authorization has to be " +
+                             "unique.");
+
+  public static final OzoneException BAD_PROPERTY =
+      new OzoneException(HTTP_BAD_REQUEST, "unknownProperty",
+          "This property is not supported by this server.");
+
+  /* Error 401 */
+  public static final OzoneException UNAUTHORIZED =
+      new OzoneException(HTTP_UNAUTHORIZED, "Unauthorized",
+                         "Access token is missing or invalid token.");
+
+  /* Error 403 */
+  public static final OzoneException ACCESS_DENIED =
+      new OzoneException(HTTP_FORBIDDEN, "accessDenied", "Access denied.");
+
+  /* Error 404 */
+  public static final OzoneException USER_NOT_FOUND =
+      new OzoneException(HTTP_NOT_FOUND, "userNotFound", "Invalid user name.");
+
+  public static final OzoneException VOLUME_NOT_FOUND =
+      new OzoneException(HTTP_NOT_FOUND, "volumeNotFound", "No such volume.");
+
+  /* Error 409 */
+  public static final OzoneException VOLUME_ALREADY_EXISTS =
+      new OzoneException(HTTP_CONFLICT, "volumeAlreadyExists",
+                         "Duplicate volume name.");
+
+  public static final OzoneException BUCKET_ALREADY_EXISTS =
+      new OzoneException(HTTP_CONFLICT, "bucketAlreadyExists",
+                         "Duplicate bucket name.");
+
+  public static final OzoneException VOLUME_NOT_EMPTY =
+      new OzoneException(HTTP_CONFLICT, "volumeNotEmpty",
+                         "Volume must not have any buckets.");
+
+  public static final OzoneException BUCKET_NOT_EMPTY =
+      new OzoneException(HTTP_CONFLICT, "bucketNotEmpty",
+                         "Bucket must not have any keys.");
+
+  public static final OzoneException KEY_OPERATION_CONFLICT =
+      new OzoneException(HTTP_CONFLICT, "keyOperationConflict",
+                         "Conflicting operation on the specified key is going" +
+                             " on.");
+
+  /* Error 500 */
+  public static final OzoneException SERVER_ERROR =
+      new OzoneException(HTTP_INTERNAL_ERROR, "internalServerError",
+                         "Internal server error.");
+
+  /**
+   * Create a new instance of Error.
+   *
+   * @param e Error Template
+   * @param requestID Request ID
+   * @param resource Resource Name
+   * @param hostID Host ID
+   *
+   * @return a new error instance based on the template
+   */
+  public static OzoneException newError(OzoneException e, String requestID,
+                                        String resource, String hostID) {
+    OzoneException err =
+        new OzoneException(e.getHttpCode(), e.getShortMessage(),
+                           e.getMessage());
+    err.setRequestId(requestID);
+    err.setResource(resource);
+    err.setHostID(hostID);
+    return err;
+  }
+
+  /**
+   * Create new instance of Error.
+   *
+   * @param e - Error Template
+   * @param args - Args
+   *
+   * @return Ozone Exception
+   */
+  public static OzoneException newError(OzoneException e, UserArgs args) {
+    OzoneException err =
+        new OzoneException(e.getHttpCode(), e.getShortMessage(),
+                           e.getMessage());
+    if (args != null) {
+      err.setRequestId(args.getRequestID());
+      err.setResource(args.getResourceName());
+      err.setHostID(args.getHostName());
+    }
+    return err;
+  }
+
+  /**
+   * Create new instance of Error.
+   *
+   * @param e - Error Template
+   * @param args - Args
+   * @param ex Exception
+   *
+   * @return Ozone Exception
+   */
+  public static OzoneException newError(OzoneException e, UserArgs args,
+                                        Exception ex) {
+    OzoneException err =
+        new OzoneException(e.getHttpCode(), e.getShortMessage(), ex);
+
+    if (args != null) {
+      err.setRequestId(args.getRequestID());
+      err.setResource(args.getResourceName());
+      err.setHostID(args.getHostName());
+    }
+    err.setMessage(ex.getMessage());
+    return err;
+  }
+
+  private ErrorTable() {
+    // Never constructed.
+  }
+}
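
The constants above are shared, immutable templates; handlers clone them through
ErrorTable.newError(...) before attaching request context. A minimal usage sketch
(the request ID, resource and host values below are hypothetical):

    // Illustrative only: turning a shared template into a request-scoped error.
    OzoneException err = ErrorTable.newError(
        ErrorTable.INVALID_BUCKET_NAME,   // shared template defined above
        "req-1234",                       // request ID (hypothetical)
        "/volumeOne/bucketOne",           // resource name (hypothetical)
        "datanode-1");                    // host ID (hypothetical)
    err.setMessage("Bucket names must be DNS compliant.");  // optional override
    throw err;
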
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/package-info.java
new file mode 100644
index 0000000..59cf724
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/exceptions/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.exceptions;
+
+/**
+ This package contains the exceptions and error table used by the Ozone REST layer.
+ */
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
new file mode 100644
index 0000000..5d26f69
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
@@ -0,0 +1,197 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.interfaces.Bucket;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.slf4j.MDC;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_FUNCTION;
+
+
+/**
+ * BucketHandler handles all Ozone bucket-related actions.
+ */
+public class BucketHandler implements Bucket {
+  /**
+   * The createBucket call handles the POST request for creating a bucket.
+   *
+   * @param volume - Volume name
+   * @param bucket - Bucket Name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response createBucket(String volume, String bucket, Request req,
+                               UriInfo info, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "createBucket");
+    return new BucketProcessTemplate() {
+      @Override
+      public Response doProcess(BucketArgs args)
+          throws OzoneException, IOException {
+        StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+        getAclsFromHeaders(args, false);
+        args.setVersioning(getVersioning(args));
+        args.setStorageType(getStorageType(args));
+        fs.createBucket(args);
+        return OzoneRestUtils.getResponse(args, HTTP_CREATED, "");
+      }
+    }.handleCall(volume, bucket, req, info, headers);
+  }
+
+  /**
+   * The updateBucket call handles the PUT request for updating a bucket.
+   *
+   * There are currently only three possible actions with updateBucket:
+   * adding/removing ACLs, setting bucket versioning, and setting the
+   * storage type. A call with any other action simply returns 200 OK.
+   *
+   * @param volume - Storage volume name
+   * @param bucket - Bucket name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response updateBucket(String volume, String bucket, Request req,
+                               UriInfo info, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "updateBucket");
+    return new BucketProcessTemplate() {
+      @Override
+      public Response doProcess(BucketArgs args)
+          throws OzoneException, IOException {
+        StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+        getAclsFromHeaders(args, true);
+        args.setVersioning(getVersioning(args));
+        args.setStorageType(getStorageType(args));
+
+        if ((args.getAddAcls() != null) || (args.getRemoveAcls() != null)) {
+          fs.setBucketAcls(args);
+        }
+
+        if (args.getVersioning() != OzoneConsts.Versioning.NOT_DEFINED) {
+          fs.setBucketVersioning(args);
+        }
+
+        if (args.getStorageType() != null) {
+          fs.setBucketStorageClass(args);
+        }
+        return OzoneRestUtils.getResponse(args, HTTP_OK, "");
+      }
+    }.handleCall(volume, bucket, req, info, headers);
+  }
+
+  /**
+   * Deletes an empty bucket.
+   *
+   * @param volume Volume name
+   * @param bucket Bucket Name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response deleteBucket(String volume, String bucket, Request req,
+                               UriInfo info, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "deleteBucket");
+    return new BucketProcessTemplate() {
+      @Override
+      public Response doProcess(BucketArgs args)
+          throws OzoneException, IOException {
+        StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+        fs.deleteBucket(args);
+        return OzoneRestUtils.getResponse(args, HTTP_OK, "");
+      }
+    }.handleCall(volume, bucket, req, info, headers);
+  }
+
+  /**
+   * listBucket lists the keys in a bucket, or returns the bucket info,
+   * depending on the info query parameter.
+   *
+   * @param volume - Storage Volume Name
+   * @param bucket - Bucket Name
+   * @param info - Info query tag (keys or bucket info)
+   * @param prefix - Prefix for the keys to be fetched
+   * @param maxKeys - Maximum number of keys to return
+   * @param startPage - Continuation Token
+   * @param req - Http request
+   * @param uriInfo - Uri Info
+   * @param headers - Http headers
+   *
+   * @return - Json Body
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response listBucket(String volume, String bucket, final String info,
+                             final String prefix, final int maxKeys,
+                             final String startPage, Request req,
+                             UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "listBucket");
+    return new BucketProcessTemplate() {
+      @Override
+      public Response doProcess(BucketArgs args)
+          throws OzoneException, IOException {
+        switch (info) {
+        case Header.OZONE_INFO_QUERY_KEY:
+          ListArgs listArgs = new ListArgs(args, prefix, maxKeys, startPage);
+          return getBucketKeysList(listArgs);
+        case Header.OZONE_INFO_QUERY_BUCKET:
+          return getBucketInfoResponse(args);
+        default:
+          OzoneException ozException =
+              ErrorTable.newError(ErrorTable.INVALID_QUERY_PARAM, args);
+          ozException.setMessage("Unrecognized query param : " + info);
+          throw ozException;
+        }
+      }
+    }.handleCall(volume, bucket, req, uriInfo, headers);
+  }
+}
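
Each handler method above tags the SLF4J MDC with the function name (OZONE_FUNCTION)
before delegating to the process template, which then adds the component, request ID,
user and resource, so every log line emitted during the call carries the request
context. A rough sketch of the pattern (the try/finally is illustrative only; the
real template clears the MDC itself on success):

    // Illustrative sketch of the per-request MDC tagging (not the actual flow).
    MDC.put(OZONE_FUNCTION, "createBucket");
    try {
      // delegate to BucketProcessTemplate.handleCall(...), which also puts
      // component, request ID, user and resource into the MDC
    } finally {
      MDC.clear();   // the real code clears the MDC inside the template
    }
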
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
new file mode 100644
index 0000000..8f31dfd
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
@@ -0,0 +1,323 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+import java.nio.file.DirectoryNotEmptyException;
+import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.NoSuchFileException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_USER;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+/**
+ * This class abstracts away the repetitive tasks in
+ * bucket handling code.
+ */
+public abstract class BucketProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BucketProcessTemplate.class);
+
+  /**
+   * This function serves as the common error handling function
+   * for all bucket related operations.
+   *
+   * @param volume - Volume Name
+   * @param bucket - Bucket Name
+   * @param request - Http Request
+   * @param uriInfo - Http Uri
+   * @param headers - Http Headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  public Response handleCall(String volume, String bucket, Request request,
+                             UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+    // TODO : Add logging
+    String reqID = OzoneUtils.getRequestID();
+    String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
+    UserArgs userArgs = null;
+    try {
+      userArgs = new UserArgs(reqID, hostName, request, uriInfo, headers);
+
+      OzoneRestUtils.validate(request, headers, reqID, bucket, hostName);
+      OzoneUtils.verifyResourceName(bucket);
+
+      UserAuth auth = UserHandlerBuilder.getAuthHandler();
+      userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
+
+      BucketArgs args = new BucketArgs(volume, bucket, userArgs);
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args);
+      LOG.debug("Success");
+      MDC.clear();
+      return response;
+
+    } catch (IllegalArgumentException argEx) {
+      LOG.error("Invalid bucket.", argEx);
+      throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, userArgs,
+          argEx);
+    } catch (IOException fsExp) {
+      handleIOException(bucket, reqID, hostName, fsExp);
+    }
+    return null;
+  }
+
+  /**
+   * Reads ACLs from headers and throws appropriate exception if needed.
+   *
+   * @param args - bucketArgs
+   *
+   * @throws OzoneException
+   */
+  void getAclsFromHeaders(BucketArgs args, boolean parseRemoveACL)
+      throws OzoneException {
+    try {
+      List<String> acls = getAcls(args, Header.OZONE_ACL_REMOVE);
+      if (acls != null && !acls.isEmpty()) {
+        args.removeAcls(acls);
+      }
+      if ((!parseRemoveACL) && args.getRemoveAcls() != null) {
+        OzoneException ex = ErrorTable.newError(ErrorTable.MALFORMED_ACL, args);
+        ex.setMessage("Invalid Remove ACLs");
+        throw ex;
+      }
+
+      acls = getAcls(args, Header.OZONE_ACL_ADD);
+      if (acls != null && !acls.isEmpty()) {
+        args.addAcls(acls);
+      }
+    } catch (IllegalArgumentException ex) {
+      throw ErrorTable.newError(ErrorTable.MALFORMED_ACL, args, ex);
+    }
+  }
+
+  /**
+   * Converts FileSystem IO exceptions to OZONE exceptions.
+   *
+   * @param bucket Name of the bucket
+   * @param reqID Request ID
+   * @param hostName Machine Name
+   * @param fsExp Exception
+   *
+   * @throws OzoneException
+   */
+  void handleIOException(String bucket, String reqID, String hostName,
+                         IOException fsExp) throws OzoneException {
+    LOG.error("IOException:", fsExp);
+
+    OzoneException exp = null;
+    if (fsExp instanceof FileAlreadyExistsException) {
+      exp = ErrorTable
+          .newError(ErrorTable.BUCKET_ALREADY_EXISTS, reqID, bucket, hostName);
+    }
+
+    if (fsExp instanceof DirectoryNotEmptyException) {
+      exp = ErrorTable
+          .newError(ErrorTable.BUCKET_NOT_EMPTY, reqID, bucket, hostName);
+    }
+
+    if (fsExp instanceof NoSuchFileException) {
+      exp = ErrorTable
+          .newError(ErrorTable.INVALID_BUCKET_NAME, reqID, bucket, hostName);
+    }
+
+    // By default we don't handle this exception yet;
+    // report an internal server error.
+    if (exp == null) {
+      exp =
+          ErrorTable.newError(ErrorTable.SERVER_ERROR, reqID, bucket, hostName);
+      if (fsExp != null) {
+        exp.setMessage(fsExp.getMessage());
+      }
+    }
+    throw exp;
+  }
+
+  /**
+   * Abstract function that gets implemented in the BucketHandler functions.
+   * It deals only with the core file-system-related logic and relies on the
+   * handleCall function for the repetitive error checks.
+   *
+   * @param args - parsed bucket args, name, userName, ACLs etc
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   * @throws IOException
+   */
+  public abstract Response doProcess(BucketArgs args)
+      throws OzoneException, IOException;
+
+
+  /**
+   * Returns the ACL strings if available.
+   * This function ignores all ACLs that are not prefixed with either
+   * the ADD or REMOVE tag.
+   *
+   * @param args - BucketArgs
+   * @param tag - Tag for different type of acls
+   *
+   * @return List of ACLs
+   *
+   */
+  List<String> getAcls(BucketArgs args, String tag)  {
+    List<String> aclStrings =
+        args.getHeaders().getRequestHeader(Header.OZONE_ACLS);
+    List<String> filteredSet = null;
+    if (aclStrings != null) {
+      filteredSet = new LinkedList<>();
+      for (String s : aclStrings) {
+        if (s.startsWith(tag)) {
+          filteredSet.add(s.replaceFirst(tag, ""));
+        }
+      }
+    }
+    return filteredSet;
+  }
+
+  /**
+   * Returns the bucket versioning info.
+   *
+   * @param args - BucketArgs
+   *
+   * @return - Versioning value, or null if the header is absent
+   *
+   * @throws OzoneException
+   */
+  OzoneConsts.Versioning getVersioning(BucketArgs args) throws OzoneException {
+
+    List<String> versionStrings =
+        args.getHeaders().getRequestHeader(Header.OZONE_BUCKET_VERSIONING);
+    if (versionStrings == null) {
+      return null;
+    }
+
+    if (versionStrings.size() > 1) {
+      OzoneException ex =
+          ErrorTable.newError(ErrorTable.MALFORMED_BUCKET_VERSION, args);
+      ex.setMessage("Exactly one bucket version header required");
+      throw ex;
+    }
+
+    String version = versionStrings.get(0);
+    try {
+      return OzoneConsts.Versioning.valueOf(version);
+    } catch (IllegalArgumentException ex) {
+      LOG.debug("Malformed Version. version: {}", version);
+      throw ErrorTable.newError(ErrorTable.MALFORMED_BUCKET_VERSION, args, ex);
+    }
+  }
+
+
+  /**
+   * Returns the storage type if the header is available, or null otherwise.
+   *
+   * @param args - bucketArgs
+   *
+   * @return StorageType
+   *
+   * @throws OzoneException
+   */
+  StorageType getStorageType(BucketArgs args) throws OzoneException {
+    List<String> storageClassString = null;
+    try {
+      storageClassString =
+          args.getHeaders().getRequestHeader(Header.OZONE_STORAGE_TYPE);
+
+      if (storageClassString == null) {
+        return null;
+      }
+      if (storageClassString.size() > 1) {
+        OzoneException ex =
+            ErrorTable.newError(ErrorTable.MALFORMED_STORAGE_TYPE, args);
+        ex.setMessage("Exactly one storage class header required");
+        throw ex;
+      }
+      return StorageType.valueOf(storageClassString.get(0).toUpperCase());
+    } catch (IllegalArgumentException ex) {
+      if (storageClassString != null) {
+        LOG.debug("Malformed storage type. Type: {}",
+            storageClassString.get(0).toUpperCase());
+      }
+      throw ErrorTable.newError(ErrorTable.MALFORMED_STORAGE_TYPE, args, ex);
+    }
+  }
+
+  /**
+   * Returns BucketInfo response.
+   *
+   * @param args - BucketArgs
+   *
+   * @return BucketInfo
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  Response getBucketInfoResponse(BucketArgs args)
+      throws IOException, OzoneException {
+    StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+    BucketInfo info = fs.getBucketInfo(args);
+    return OzoneRestUtils.getResponse(args, HTTP_OK, info.toJsonString());
+  }
+
+  /**
+   * Returns list of objects in a bucket.
+   * @param args - ListArgs
+   * @return Response
+   * @throws IOException
+   * @throws OzoneException
+   */
+  Response getBucketKeysList(ListArgs args) throws IOException, OzoneException {
+    StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+    ListKeys objects = fs.listKeys(args);
+    return OzoneRestUtils.getResponse(args.getArgs(), HTTP_OK,
+        objects.toJsonString());
+  }
+
+}
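
getAcls above filters the raw Header.OZONE_ACLS request headers by a tag prefix and
strips that prefix from the surviving entries. A standalone sketch of the filtering,
assuming hypothetical header values and an "ADD " tag (the real prefixes come from
Header.OZONE_ACL_ADD / Header.OZONE_ACL_REMOVE):

    // Minimal sketch of the prefix filtering done in getAcls(); values are made up.
    List<String> aclHeaders = Arrays.asList(
        "ADD user:bilbo:rw", "REMOVE user:frodo:rw", "ADD group:dev:r");
    List<String> added = new LinkedList<>();
    for (String s : aclHeaders) {
      if (s.startsWith("ADD ")) {                // Header.OZONE_ACL_ADD in the real code
        added.add(s.replaceFirst("ADD ", ""));   // -> "user:bilbo:rw", "group:dev:r"
      }
    }
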
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyHandler.java
new file mode 100644
index 0000000..d4c5a79
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyHandler.java
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.interfaces.Keys;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+import org.apache.commons.codec.binary.Hex;
+
+/**
+ * KeyHandler deals with basic Key Operations.
+ */
+public class KeyHandler implements Keys {
+
+  /**
+   * Gets the key, or the key information, if it exists.
+   *
+   * @param volume  Storage Volume
+   * @param bucket  Name of the bucket
+   * @param key Name of the key
+   * @param info Tag info
+   * @param req Request
+   * @param uriInfo Uri Info
+   * @param headers Http Header
+   * @return Response
+   * @throws OzoneException
+   */
+  @Override
+  public Response getKey(String volume, String bucket, String key, String info,
+      Request req, UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+    return new KeyProcessTemplate() {
+      /**
+       * Abstract function that gets implemented in the KeyHandler functions.
+       * This function will just deal with the core file system related logic
+       * and will rely on handleCall function for repetitive error checks
+       *
+       * @param args - parsed bucket args, name, userName, ACLs etc
+       * @param input - The body as an Input Stream
+       * @param request - Http request
+       * @param headers - Parsed http Headers.
+       * @param uriInfo - UriInfo
+       *
+       * @return Response
+       *
+       * @throws IOException - From the file system operations
+       */
+      @Override
+      public Response doProcess(KeyArgs args, InputStream input,
+                                Request request, HttpHeaders headers,
+                                UriInfo uriInfo)
+          throws IOException, OzoneException, NoSuchAlgorithmException {
+        if (info == null) {
+          return getKey(args);
+        } else if (info.equals(Header.OZONE_INFO_QUERY_KEY)) {
+          return getKeyInfo(args);
+        }
+
+        OzoneException ozException = ErrorTable
+            .newError(ErrorTable.INVALID_QUERY_PARAM, args);
+        ozException.setMessage("Unrecognized query param : " + info);
+        throw ozException;
+      }
+    }.handleCall(volume, bucket, key, req, headers, uriInfo, null);
+  }
+
+  /**
+   * Gets the Key if it exists.
+   */
+  private Response getKey(KeyArgs args)
+      throws IOException, OzoneException {
+    StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+    LengthInputStream stream = fs.newKeyReader(args);
+    return OzoneRestUtils.getResponse(args, HTTP_OK, stream);
+  }
+
+  /**
+   * Gets the Key information if it exists.
+   */
+  private Response getKeyInfo(KeyArgs args)
+      throws IOException, OzoneException {
+    StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+    KeyInfo keyInfo = fs.getKeyInfo(args);
+    return OzoneRestUtils.getResponse(args, HTTP_OK, keyInfo.toJsonString());
+  }
+
+  /**
+   * Adds a key to an existing bucket. If the object already exists, this call
+   * overwrites it, or adds a new version if bucket versioning is turned on.
+   *
+   * @param volume  Storage Volume Name
+   * @param bucket  Name of the bucket
+   * @param keys    Name of the Object
+   * @param is      InputStream or File Data
+   * @param req     Request
+   * @param info    - UriInfo
+   * @param headers http headers
+   * @return Response
+   * @throws OzoneException
+   */
+  @Override
+  public Response putKey(String volume, String bucket, String keys,
+                         InputStream is, Request req, UriInfo info,
+                         HttpHeaders headers) throws OzoneException {
+
+    return new KeyProcessTemplate() {
+      /**
+       * Abstract function that gets implemented in the KeyHandler functions.
+       * This function will just deal with the core file system related logic
+       * and will rely on handleCall function for repetitive error checks
+       *
+       * @param args - parsed bucket args, name, userName, ACLs etc
+       * @param input - The body as an Input Stream
+       * @param request - Http request
+       * @param headers - Parsed http Headers.
+       * @param info - UriInfo
+       *
+       * @return Response
+       *
+       * @throws IOException - From the file system operations
+       */
+      @Override
+      public Response doProcess(KeyArgs args, InputStream input,
+                                Request request, HttpHeaders headers,
+                                UriInfo info)
+          throws IOException, OzoneException, NoSuchAlgorithmException {
+        final int eof = -1;
+        StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+
+        byte[] buffer = new byte[4 * 1024];
+        String contentLenString = getContentLength(headers, args);
+        String newLen = contentLenString.replaceAll("\"", "");
+        int contentLen = Integer.parseInt(newLen);
+        args.setSize(contentLen);
+
+        MessageDigest md5 = MessageDigest.getInstance("MD5");
+        int bytesRead = 0;
+        int len = 0;
+        OutputStream stream = fs.newKeyWriter(args);
+        while ((bytesRead < contentLen) && (len != eof)) {
+          int readSize =
+              (contentLen - bytesRead > buffer.length) ? buffer.length :
+                  contentLen - bytesRead;
+          len = input.read(buffer, 0, readSize);
+          if (len != eof) {
+            stream.write(buffer, 0, len);
+            md5.update(buffer, 0, len);
+            bytesRead += len;
+
+          }
+        }
+
+        checkFileLengthMatch(args, fs, contentLen, bytesRead);
+
+        String hashString = Hex.encodeHexString(md5.digest());
+// TODO : Enable hash value checking.
+//          String contentHash = getContentMD5(headers, args);
+//          checkFileHashMatch(args, hashString, fs, contentHash);
+        args.setHash(hashString);
+        args.setSize(bytesRead);
+        fs.commitKey(args, stream);
+        return OzoneRestUtils.getResponse(args, HTTP_CREATED, "");
+      }
+    }.handleCall(volume, bucket, keys, req, headers, info, is);
+  }
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param volume  Storage Volume Name
+   * @param bucket  Name of the bucket
+   * @param keys    Name of the Object
+   * @param req     http Request
+   * @param info    - UriInfo
+   * @param headers HttpHeaders
+   * @return Response
+   * @throws OzoneException
+   */
+  @Override
+  public Response deleteKey(String volume, String bucket, String keys,
+                            Request req, UriInfo info, HttpHeaders headers)
+      throws OzoneException {
+    return new KeyProcessTemplate() {
+      /**
+       * Abstract function that gets implemented in the KeyHandler functions.
+       * This function will just deal with the core file system related logic
+       * and will rely on handleCall function for repetitive error checks
+       *
+       * @param args - parsed bucket args, name, userName, ACLs etc
+       * @param input - The body as an Input Stream
+       * @param request - Http request
+       * @param headers - Parsed http Headers.
+       * @param info - UriInfo
+       *
+       * @return Response
+       *
+       * @throws IOException - From the file system operations
+       */
+      @Override
+      public Response doProcess(KeyArgs args, InputStream input,
+                                Request request, HttpHeaders headers,
+                                UriInfo info)
+          throws IOException, OzoneException, NoSuchAlgorithmException {
+        StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+        fs.deleteKey(args);
+        return OzoneRestUtils.getResponse(args, HTTP_OK, "");
+      }
+    }.handleCall(volume, bucket, keys, req, headers, info, null);
+  }
+}
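
putKey above reads exactly Content-Length bytes from the request body in 4 KB chunks,
feeding an MD5 digest as it copies. The loop can be isolated into a small helper like
the sketch below (the helper itself is hypothetical; it mirrors the logic in doProcess):

    // Hypothetical helper mirroring the bounded copy loop in putKey.
    static int copyExactly(InputStream in, OutputStream out,
                           MessageDigest md5, int contentLen) throws IOException {
      byte[] buffer = new byte[4 * 1024];
      int bytesRead = 0;
      int len = 0;
      while (bytesRead < contentLen && len != -1) {
        int readSize = Math.min(buffer.length, contentLen - bytesRead);
        len = in.read(buffer, 0, readSize);
        if (len != -1) {
          out.write(buffer, 0, len);
          md5.update(buffer, 0, len);
          bytesRead += len;
        }
      }
      return bytesRead;   // the caller compares this against contentLen
    }
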
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
new file mode 100644
index 0000000..ef0293e
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.commons.codec.binary.Base64;
+
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.BAD_DIGEST;
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.INCOMPLETE_BODY;
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.INVALID_BUCKET_NAME;
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.INVALID_REQUEST;
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.SERVER_ERROR;
+import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.newError;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_USER;
+
+
+/**
+ * This class abstracts away the repetitive tasks in key handling code.
+ */
+public abstract class KeyProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KeyProcessTemplate.class);
+
+  /**
+   * This function serves as the common error handling function for all
+   * key-related operations.
+   *
+   * @param volume  volume name
+   * @param bucket  bucket name
+   * @param key     the object name
+   * @param request Http request
+   * @param headers Http headers
+   * @param info    Uri info
+   * @param is      Input stream carrying the request body
+   * @throws OzoneException
+   */
+  public Response handleCall(String volume, String bucket, String key,
+                             Request request, HttpHeaders headers, UriInfo info,
+                             InputStream is) throws OzoneException {
+
+    String reqID = OzoneUtils.getRequestID();
+    String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
+    UserArgs userArgs = null;
+    try {
+      userArgs = new UserArgs(reqID, hostName, request, info, headers);
+      OzoneRestUtils.validate(request, headers, reqID, bucket, hostName);
+      OzoneUtils.verifyResourceName(bucket);
+
+      UserAuth auth = UserHandlerBuilder.getAuthHandler();
+      userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
+
+      KeyArgs args = new KeyArgs(volume, bucket, key, userArgs);
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args, is, request, headers, info);
+      LOG.debug("Success");
+      MDC.clear();
+      return response;
+
+    } catch (IllegalArgumentException argExp) {
+      LOG.error("Invalid bucket in key call.", argExp);
+      throw newError(INVALID_BUCKET_NAME, userArgs, argExp);
+    } catch (IOException fsExp) {
+      LOG.error("IOException:", fsExp);
+      // Map KEY_NOT_FOUND to INVALID_KEY
+      if (fsExp.getMessage().endsWith(
+          KeySpaceManagerProtocolProtos.Status.KEY_NOT_FOUND.name())) {
+        throw ErrorTable.newError(ErrorTable.INVALID_KEY, userArgs, fsExp);
+      }
+
+      // TODO : Handle errors from the FileSystem , let us map to server error
+      // for now.
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, userArgs, fsExp);
+    } catch (NoSuchAlgorithmException algoEx) {
+      LOG.error("NoSuchAlgorithmException. Probably indicates an unusual java "
+          + "installation.", algoEx);
+      throw ErrorTable.newError(SERVER_ERROR, userArgs, algoEx);
+    }
+  }
+
+  /**
+   * Abstract function that gets implemented in the KeyHandler functions. It
+   * deals only with the core file-system-related logic and relies on the
+   * handleCall function for the repetitive error checks.
+   *
+   * @param args    - parsed bucket args, name, userName, ACLs etc
+   * @param input   - The body as an Input Stream
+   * @param request - Http request
+   * @param headers - Parsed http Headers.
+   * @param info    - UriInfo
+   * @return Response
+   * @throws IOException - From the file system operations
+   */
+  public abstract Response doProcess(KeyArgs args, InputStream input,
+                                     Request request, HttpHeaders headers,
+                                     UriInfo info)
+      throws IOException, OzoneException, NoSuchAlgorithmException;
+
+  /**
+   * Checks whether the Content-MD5 supplied by the client matches the hash we
+   * computed from the stream. If it does not match, we delete the key and
+   * throw an exception to let the user know about the hash mismatch.
+   *
+   * @param args           Object Args
+   * @param computedString MD5 hash value
+   * @param fs             Pointer to File System so we can delete the file
+   * @param contentHash    User Specified hash string
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public void checkFileHashMatch(KeyArgs args, String computedString,
+                                 StorageHandler fs, String contentHash)
+      throws IOException, OzoneException {
+    if (contentHash != null) {
+      String contentString =
+          new String(Base64.decodeBase64(contentHash), OzoneUtils.ENCODING)
+              .trim();
+
+      if (!contentString.equals(computedString)) {
+        fs.deleteKey(args);
+        OzoneException ex = ErrorTable.newError(BAD_DIGEST, args);
+        String msg = String.format("MD5 Digest mismatch. Expected %s Found " +
+            "%s", contentString, computedString);
+        ex.setMessage(msg);
+        LOG.debug(msg);
+        throw ex;
+      }
+    }
+  }
+
+  /**
+   * Checks whether the Content-Length matches the actual stream length. If we
+   * find a mismatch, we delete the key and throw an exception to let the user
+   * know that a length mismatch was detected.
+   *
+   * @param args       Object Args
+   * @param fs         Pointer to File System Object, to delete the file that we
+   *                   wrote
+   * @param contentLen Http Content-Length Header
+   * @param bytesRead  Actual Bytes we read from the stream
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public void checkFileLengthMatch(KeyArgs args, StorageHandler fs,
+                                   int contentLen, int bytesRead)
+      throws IOException, OzoneException {
+    if (bytesRead != contentLen) {
+      fs.deleteKey(args);
+      OzoneException ex = ErrorTable.newError(INCOMPLETE_BODY, args);
+      String msg = String.format("Body length mismatch. Expected length : %d" +
+          " Found %d", contentLen, bytesRead);
+      ex.setMessage(msg);
+      LOG.debug(msg);
+      throw ex;
+    }
+  }
+
+  /**
+   * Returns the Content-Length header value.
+   *
+   * @param headers - Http Headers
+   * @return - String value of the header; an error is thrown if it is missing
+   */
+  public String getContentLength(HttpHeaders headers, KeyArgs args)
+      throws OzoneException {
+    List<String> contentLengthList =
+        headers.getRequestHeader(HttpHeaders.CONTENT_LENGTH);
+    if ((contentLengthList != null) && (contentLengthList.size() > 0)) {
+      return contentLengthList.get(0);
+    }
+
+    OzoneException ex = ErrorTable.newError(INVALID_REQUEST, args);
+    ex.setMessage("Content-Length is a required header for putting a key.");
+    throw ex;
+
+  }
+
+  /**
+   * Returns the Content-MD5 header value if available.
+   *
+   * @param headers - Http Headers
+   * @return - String value of the header, or an empty string if it is absent
+   */
+  public String getContentMD5(HttpHeaders headers, KeyArgs args) {
+    List<String> contentLengthList =
+        headers.getRequestHeader(Header.CONTENT_MD5);
+    if ((contentLengthList != null) && (contentLengthList.size() > 0)) {
+      return contentLengthList.get(0);
+    }
+// TODO : Should we make this compulsory ?
+//    OzoneException ex = ErrorTable.newError(ErrorTable.invalidRequest, args);
+//    ex.setMessage("Content-MD5 is a required header for putting a key");
+//    throw ex;
+    return "";
+  }
+}
+
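
checkFileHashMatch compares the client-supplied Content-MD5 header (Base64-decoded)
with the hex digest computed while streaming the body, and deletes the partially
written key on a mismatch. A minimal sketch of the comparison, assuming a UTF-8
charset in place of OzoneUtils.ENCODING and hypothetical variable names:

    // Sketch of the digest comparison in checkFileHashMatch (values are hypothetical).
    String computedHex = Hex.encodeHexString(md5.digest());
    String expected = new String(
        Base64.decodeBase64(contentMd5Header), StandardCharsets.UTF_8).trim();
    if (!expected.equals(computedHex)) {
      // delete the partially written key and surface BAD_DIGEST to the client
    }
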
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/ServiceFilter.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/ServiceFilter.java
new file mode 100644
index 0000000..76aa286
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/ServiceFilter.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import com.sun.jersey.spi.container.ContainerRequest;
+import com.sun.jersey.spi.container.ContainerRequestFilter;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.ext.Provider;
+
+/**
+ * This class is used to intercept root URL requests and route them to
+ * the volume list (service) functionality.
+ */
+@Provider
+public class ServiceFilter implements ContainerRequestFilter {
+  /**
+   * Filter the request.
+   * <p>
+   * An implementation may modify the state of the request or
+   * create a new instance.
+   *
+   * @param request the request.
+   *
+   * @return the request.
+   */
+  @Override
+  public ContainerRequest filter(ContainerRequest request) {
+    if (request.getRequestUri().getPath().length() > 1) {
+      return request;
+    }
+
+    // Just re-route it to the volume handler with a hypothetical volume name;
+    // the volume name is ignored for this query.
+
+    request.setUris(request.getBaseUri(),
+        UriBuilder.fromUri(request.getRequestUri())
+        .path("/service")
+        .queryParam("info", Header.OZONE_LIST_QUERY_SERVICE)
+        .build());
+
+    return request;
+  }
+}
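
The filter above only touches requests whose path is the bare root; everything else
passes through untouched. The rewrite itself can be pictured in isolation as below
(host, port and the literal "service" query value are placeholders; the real value
comes from Header.OZONE_LIST_QUERY_SERVICE):

    // Sketch of the URI rewrite performed by ServiceFilter (placeholder values).
    URI requestUri = URI.create("http://datanode:9880/");
    URI rerouted = UriBuilder.fromUri(requestUri)
        .path("/service")
        .queryParam("info", "service")
        .build();
    // rerouted -> http://datanode:9880/service?info=service
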
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/StorageHandlerBuilder.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/StorageHandlerBuilder.java
new file mode 100644
index 0000000..f86f247
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/StorageHandlerBuilder.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.localstorage.LocalStorageHandler;
+
+/**
+ * This class is responsible for providing a {@link StorageHandler}
+ * implementation to object store web handlers.
+ */
+@InterfaceAudience.Private
+public final class StorageHandlerBuilder {
+
+  private static final ThreadLocal<StorageHandler>
+      STORAGE_HANDLER_THREAD_LOCAL = new ThreadLocal<>();
+
+  /**
+   * Returns the configured StorageHandler from thread-local storage for this
+   * thread.
+   *
+   * @return StorageHandler from thread-local storage
+   */
+  public static StorageHandler getStorageHandler() {
+    StorageHandler storageHandler = STORAGE_HANDLER_THREAD_LOCAL.get();
+    if (storageHandler != null) {
+      return storageHandler;
+    } else {
+      // This only happens while using mvn jetty:run for testing.
+      Configuration conf = new OzoneConfiguration();
+      return new LocalStorageHandler(conf);
+    }
+  }
+
+  /**
+   * Removes the configured StorageHandler from thread-local storage for this
+   * thread.
+   */
+  public static void removeStorageHandler() {
+    STORAGE_HANDLER_THREAD_LOCAL.remove();
+  }
+
+  /**
+   * Sets the configured StorageHandler in thread-local storage for this thread.
+   *
+   * @param storageHandler StorageHandler to set in thread-local storage
+   */
+  public static void setStorageHandler(StorageHandler storageHandler) {
+    STORAGE_HANDLER_THREAD_LOCAL.set(storageHandler);
+  }
+
+  /**
+   * There is no reason to instantiate this class.
+   */
+  private StorageHandlerBuilder() {
+  }
+}
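
The builder hands each request thread its StorageHandler through a ThreadLocal. A
dispatcher using it would typically set the handler before invoking the JAX-RS
resources and remove it afterwards, so pooled threads do not leak state across
requests. A hedged sketch of that wiring (the dispatch wrapper itself is hypothetical):

    // Hypothetical wrapper showing the intended set/remove pattern.
    void dispatch(StorageHandler handler, Runnable jaxRsCall) {
      StorageHandlerBuilder.setStorageHandler(handler);
      try {
        jaxRsCall.run();   // resources fetch the handler via getStorageHandler()
      } finally {
        StorageHandlerBuilder.removeStorageHandler();   // avoid cross-request leakage
      }
    }
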
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/UserHandlerBuilder.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/UserHandlerBuilder.java
new file mode 100644
index 0000000..d9051f3
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/UserHandlerBuilder.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.web.userauth.Simple;
+
+/**
+ * This class is responsible for providing a
+ * {@link org.apache.hadoop.ozone.web.interfaces.UserAuth}
+ * implementation to object store web handlers.
+ */
+@InterfaceAudience.Private
+public final class UserHandlerBuilder {
+
+  private static final ThreadLocal<UserAuth> USER_AUTH_THREAD_LOCAL =
+      new ThreadLocal<UserAuth>();
+
+  /**
+   * Returns the configured UserAuth from thread-local storage for this
+   * thread.
+   *
+   * @return UserAuth from thread-local storage
+   */
+  public static UserAuth getAuthHandler() {
+    UserAuth authHandler = USER_AUTH_THREAD_LOCAL.get();
+    if (authHandler != null) {
+      return authHandler;
+    } else {
+      // This only happens while using mvn jetty:run for testing.
+      return new Simple();
+    }
+  }
+
+  /**
+   * Removes the configured UserAuth from thread-local storage for this
+   * thread.
+   */
+  public static void removeAuthHandler() {
+    USER_AUTH_THREAD_LOCAL.remove();
+  }
+
+  /**
+   * Sets the configured UserAuthHandler in thread-local storage for this
+   * thread.
+   *
+   * @param authHandler authHandler to set in thread-local storage
+   */
+  public static void setAuthHandler(UserAuth authHandler) {
+    USER_AUTH_THREAD_LOCAL.set(authHandler);
+  }
+
+  /**
+   * There is no reason to instantiate this class.
+   */
+  private UserHandlerBuilder() {
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
new file mode 100644
index 0000000..002f289
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
@@ -0,0 +1,272 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.web.interfaces.Volume;
+
+import static java.net.HttpURLConnection.HTTP_CREATED;
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_FUNCTION;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+/**
+ * VolumeHandler handles volume-specific HTTP calls.
+ *
+ * Most functions in this file follow a simple pattern.
+ * All calls are handled by VolumeProcessTemplate.handleCall, which
+ * calls back into the doProcess function.
+ *
+ * Everything common to volume handling is abstracted out in handleCall,
+ * for example checking that the volume name is sane and that the request
+ * carries a supported Ozone version number and a valid date. Actions
+ * specific to each call live inside the doProcess callback.
+ */
+@InterfaceAudience.Private
+public class VolumeHandler implements Volume {
+  private static final Logger LOG = LoggerFactory.getLogger(VolumeHandler
+      .class);
+  /**
+   * Creates a volume.
+   *
+   * @param volume Volume Name, this has to be unique at Ozone cluster level
+   * @param quota Quota for this Storage Volume - <int>(<BYTES|MB|GB|TB>)
+   * @param req Request Object
+   * @param uriInfo URI info
+   * @param headers Http Headers
+   *
+   * @return Standard JAX-RS Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response createVolume(String volume, final String quota, Request req,
+                               UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "createVolume");
+    return new VolumeProcessTemplate() {
+      @Override
+      public Response doProcess(VolumeArgs args)
+          throws IOException, OzoneException {
+        UserAuth auth = UserHandlerBuilder.getAuthHandler();
+        if (auth.isAdmin(args)) {
+          args.setAdminName(args.getUserName());
+          String volumeOwner = auth.getOzoneUser(args);
+
+          if (volumeOwner == null) {
+            throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args);
+          }
+
+          if (!auth.isUser(volumeOwner, args)) {
+            throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args);
+          }
+
+          args.setUserName(volumeOwner);
+          args.setGroups(auth.getGroups(args));
+          if (!quota.equals(Header.OZONE_QUOTA_UNDEFINED)) {
+            setQuotaArgs(args, quota);
+          }
+          StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+          fs.createVolume(args);
+          return OzoneRestUtils.getResponse(args, HTTP_CREATED, "");
+        } else {
+          throw ErrorTable.newError(ErrorTable.ACCESS_DENIED, args);
+        }
+      }
+    }.handleCall(volume, req, uriInfo, headers);
+  }
+
+  /**
+   * Updates volume metadata.
+   *
+   * There are only two actions currently possible with updateVolume:
+   * changing the volume ownership or updating the quota. If you make a call
+   * with neither of these actions, update just returns 200 OK.
+   *
+   * @param volume Volume Name, this has to be unique at Ozone Level
+   * @param quota Quota for this volume - <int>(<BYTES|MB|GB|TB>)|remove
+   * @param req - Request Object
+   * @param uriInfo - URI info
+   * @param headers Http Headers
+   *
+   * @return Standard JAX-RS Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response updateVolume(String volume, final String quota, Request req,
+                               UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "updateVolume");
+    return new VolumeProcessTemplate() {
+      @Override
+      public Response doProcess(VolumeArgs args)
+          throws IOException, OzoneException {
+        UserAuth auth = UserHandlerBuilder.getAuthHandler();
+        if (auth.isAdmin(args)) {
+          StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+          args.setAdminName(args.getUserName());
+          String newVolumeOwner = auth.getOzoneUser(args);
+
+          if (newVolumeOwner != null) {
+            if (!auth.isUser(newVolumeOwner, args)) {
+              throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args);
+            }
+            args.setUserName(newVolumeOwner);
+            fs.setVolumeOwner(args);
+          }
+
+          if (!quota.equals(Header.OZONE_QUOTA_UNDEFINED)) {
+            if (quota.equals(Header.OZONE_QUOTA_REMOVE)) {
+              // if it is remove, just tell the file system to remove quota
+              fs.setVolumeQuota(args, true);
+            } else {
+              setQuotaArgs(args, quota);
+              fs.setVolumeQuota(args, false);
+            }
+          }
+          return OzoneRestUtils.getResponse(args, HTTP_OK, "");
+        } else {
+          // Only Admins are allowed to update volumes
+          throw ErrorTable.newError(ErrorTable.ACCESS_DENIED, args);
+        }
+      }
+    }.handleCall(volume, req, uriInfo, headers);
+  }
+
+
+  /**
+   * Deletes a volume if it is empty.
+   *
+   * @param volume Volume Name
+   * @param req - Http Request
+   * @param uriInfo - http URI
+   * @param headers - http headers
+   *
+   * @return Standard JAX-RS Response
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public Response deleteVolume(String volume, Request req, UriInfo uriInfo,
+                               HttpHeaders headers) throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "deleteVolume");
+
+    return new VolumeProcessTemplate() {
+      @Override
+      public Response doProcess(VolumeArgs args)
+          throws IOException, OzoneException {
+        UserAuth auth = UserHandlerBuilder.getAuthHandler();
+        if (auth.isAdmin(args)) {
+          StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+          fs.deleteVolume(args);
+          return OzoneRestUtils.getResponse(args, HTTP_OK, "");
+        } else {
+          throw ErrorTable.newError(ErrorTable.ACCESS_DENIED, args);
+        }
+      }
+    }.handleCall(volume, req, uriInfo, headers);
+  }
+
+  /**
+   * Returns Volume info. This API can be invoked either by admin or the owner.
+   *
+   * @param volume  - Storage Volume Name
+   * @param info    - Info attribute
+   * @param prefix  - Prefix key
+   * @param maxKeys - Max results
+   * @param prevKey - PrevKey
+   * @param req     - Http Req
+   * @param uriInfo - UriInfo.
+   * @param headers - Http headers
+   * @return Standard JAX-RS Response
+   * @throws OzoneException
+   */
+  @Override
+  public Response getVolumeInfo(String volume, final String info,
+                                final String prefix,
+                                final int maxKeys,
+                                final String prevKey,
+                                final boolean rootScan,
+                                Request req,
+                                final UriInfo uriInfo, HttpHeaders headers)
+      throws OzoneException {
+
+    return new VolumeProcessTemplate() {
+      @Override
+      public Response doProcess(VolumeArgs args)
+          throws IOException, OzoneException {
+
+        switch (info) {
+        case Header.OZONE_INFO_QUERY_BUCKET:
+          MDC.put(OZONE_FUNCTION, "ListBucket");
+          return getBucketsInVolume(args, prefix, maxKeys, prevKey);
+        case Header.OZONE_INFO_QUERY_VOLUME:
+          MDC.put(OZONE_FUNCTION, "InfoVolume");
+          assertNoListParamPresent(uriInfo, args);
+          return getVolumeInfoResponse(args); // Return volume info
+        case Header.OZONE_LIST_QUERY_SERVICE:
+          MDC.put(OZONE_FUNCTION, "ListVolume");
+          return getVolumesByUser(args, prefix, maxKeys, prevKey, rootScan);
+        default:
+          LOG.debug("Unrecognized query param : {} ", info);
+          OzoneException ozoneException =
+              ErrorTable.newError(ErrorTable.INVALID_QUERY_PARAM, args);
+          ozoneException.setMessage("Unrecognized query param : " + info);
+          throw ozoneException;
+        }
+      }
+    }.handleCall(volume, req, uriInfo, headers);
+  }
+
+  /**
+   * Asserts no list query param is present during this call.
+   *
+   * @param uriInfo - UriInfo
+   * @param args    - VolumeArgs
+   * @throws OzoneException
+   */
+  private void assertNoListParamPresent(final UriInfo uriInfo, VolumeArgs
+      args) throws
+      OzoneException {
+
+    String prefix = uriInfo.getQueryParameters().getFirst("prefix");
+    String maxKeys = uriInfo.getQueryParameters().getFirst("max_keys");
+    String prevKey = uriInfo.getQueryParameters().getFirst("prev_key");
+    if ((prefix != null && !prefix.equals(Header.OZONE_EMPTY_STRING)) ||
+        (maxKeys != null && !maxKeys.equals(Header.OZONE_DEFAULT_LIST_SIZE)) ||
+        (prevKey != null && !prevKey.equals(Header.OZONE_EMPTY_STRING))) {
+      throw ErrorTable.newError(ErrorTable.INVALID_QUERY_PARAM, args);
+    }
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
new file mode 100644
index 0000000..1d98400
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.handlers;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+import java.nio.file.DirectoryNotEmptyException;
+import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.NoSuchFileException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.OzoneRestUtils;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.OzoneConsts.OZONE_USER;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+
+/**
+ * This class abstracts away the repetitive tasks in
+ * handling volume-related code.
+ */
+@InterfaceAudience.Private
+public abstract class VolumeProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(VolumeProcessTemplate.class);
+
+
+  /**
+   * The handle call is the common functionality for Volume
+   * handling code.
+   *
+   * @param volume - Name of the Volume
+   * @param request - request
+   * @param info - UriInfo
+   * @param headers - Http Headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  public Response handleCall(String volume, Request request, UriInfo info,
+                             HttpHeaders headers) throws OzoneException {
+    String reqID = OzoneUtils.getRequestID();
+    String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
+    UserArgs userArgs  = null;
+    try {
+      userArgs = new UserArgs(reqID, hostName, request, info, headers);
+      OzoneRestUtils.validate(request, headers, reqID, volume, hostName);
+
+      // we use the same logic for both bucket and volume names
+      OzoneUtils.verifyResourceName(volume);
+      UserAuth auth = UserHandlerBuilder.getAuthHandler();
+
+      userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
+      VolumeArgs args = new VolumeArgs(volume, userArgs);
+
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args);
+      LOG.info("Success");
+      MDC.clear();
+      return response;
+
+    } catch (IllegalArgumentException ex) {
+      LOG.error("Illegal argument.", ex);
+      throw ErrorTable.newError(ErrorTable.INVALID_VOLUME_NAME, userArgs, ex);
+    } catch (IOException ex) {
+      handleIOException(volume, reqID, hostName, ex);
+    }
+    return null;
+  }
+
+  /**
+   * Specific handler for each call.
+   *
+   * @param args - Volume Args
+   *
+   * @return - Response
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public abstract Response doProcess(VolumeArgs args)
+      throws IOException, OzoneException;
+
+  /**
+   * Maps Java File System Exceptions to Ozone Exceptions in the Volume path.
+   *
+   * @param volume - Name of the Volume
+   * @param reqID - Request ID
+   * @param hostName - HostName
+   * @param fsExp - Exception
+   *
+   * @throws OzoneException
+   */
+  private void handleIOException(String volume, String reqID, String hostName,
+                                 IOException fsExp) throws OzoneException {
+    LOG.error("IOException:", fsExp);
+    OzoneException exp = null;
+
+    if ((fsExp != null && fsExp.getMessage().endsWith(
+        KeySpaceManagerProtocolProtos.Status.VOLUME_ALREADY_EXISTS.name()))
+        || fsExp instanceof FileAlreadyExistsException) {
+      exp = ErrorTable
+          .newError(ErrorTable.VOLUME_ALREADY_EXISTS, reqID, volume, hostName);
+    }
+
+    if (fsExp instanceof DirectoryNotEmptyException) {
+      exp = ErrorTable
+          .newError(ErrorTable.VOLUME_NOT_EMPTY, reqID, volume, hostName);
+    }
+
+    if (fsExp instanceof NoSuchFileException) {
+      exp = ErrorTable
+          .newError(ErrorTable.INVALID_VOLUME_NAME, reqID, volume, hostName);
+    }
+
+    if ((fsExp != null) && (exp != null)) {
+      exp.setMessage(fsExp.getMessage());
+    }
+
+    // We don't handle this FS error yet; report an internal server error
+    if (exp == null) {
+      exp =
+          ErrorTable.newError(ErrorTable.SERVER_ERROR, reqID, volume, hostName);
+      if (fsExp != null) {
+        exp.setMessage(fsExp.getMessage());
+      }
+    }
+    throw exp;
+  }
+
+  /**
+   * Sets the user-provided quota string into args and throws an Ozone
+   * exception if the quota is malformed.
+   *
+   * @param args - volume args
+   * @param quota - quota string
+   *
+   * @throws OzoneException
+   */
+  void setQuotaArgs(VolumeArgs args, String quota) throws OzoneException {
+    try {
+      args.setQuota(quota);
+    } catch (IllegalArgumentException ex) {
+      LOG.debug("Malformed Quota.", ex);
+      throw ErrorTable.newError(ErrorTable.MALFORMED_QUOTA, args, ex);
+    }
+  }
+
+  /**
+   * Builds a Response that wraps the volume info for the given volume.
+   *
+   * @param args - volumeArgs
+   *
+   * @return - VolumeInfo
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  Response getVolumeInfoResponse(VolumeArgs args)
+      throws IOException, OzoneException {
+    StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+    VolumeInfo info = fs.getVolumeInfo(args);
+    return OzoneRestUtils.getResponse(args, HTTP_OK, info.toJsonString());
+  }
+
+  /**
+   * Returns all the volumes belonging to a user.
+   *
+   * @param user - userArgs
+   * @return - Response
+   * @throws OzoneException
+   * @throws IOException
+   */
+  Response getVolumesByUser(UserArgs user, String prefix, int maxKeys,
+      String prevKey, boolean rootScan) throws OzoneException, IOException {
+
+    String validatedUser = user.getUserName();
+    try {
+      UserAuth auth = UserHandlerBuilder.getAuthHandler();
+      if (rootScan && !auth.isAdmin(user)) {
+        throw ErrorTable.newError(ErrorTable.UNAUTHORIZED, user);
+      }
+      if (auth.isAdmin(user)) {
+        validatedUser = auth.getOzoneUser(user);
+        if (validatedUser == null) {
+          validatedUser = auth.getUser(user);
+        }
+      }
+
+      UserArgs onBehalfOf =
+          new UserArgs(validatedUser, user.getRequestID(), user.getHostName(),
+              user.getRequest(), user.getUri(), user.getHeaders());
+
+      StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+      ListArgs<UserArgs> listArgs = new ListArgs<>(onBehalfOf, prefix,
+          maxKeys, prevKey);
+      listArgs.setRootScan(rootScan);
+      ListVolumes volumes = fs.listVolumes(listArgs);
+      return OzoneRestUtils.getResponse(user, HTTP_OK, volumes.toJsonString());
+    } catch (IOException ex) {
+      LOG.debug("unable to get the volume list for the user.", ex);
+      OzoneException exp = ErrorTable.newError(ErrorTable.SERVER_ERROR,
+          user, ex);
+      exp.setMessage("unable to get the volume list for the user");
+      throw exp;
+    }
+  }
+
+  /**
+   * Returns a list of Buckets in a Volume.
+   *
+   * @param args    - VolumeArgs
+   * @param prefix  - Prefix to Match
+   * @param maxKeys - Max results to return.
+   * @param prevKey - PrevKey
+   * @return List of Buckets
+   * @throws OzoneException
+   */
+  Response getBucketsInVolume(VolumeArgs args, String prefix, int maxKeys,
+                              String prevKey) throws OzoneException {
+    try {
+      // UserAuth auth = UserHandlerBuilder.getAuthHandler();
+      // TODO : Check ACLS.
+      StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
+      ListArgs<VolumeArgs> listArgs = new ListArgs<>(args, prefix,
+          maxKeys, prevKey);
+      ListBuckets bucketList = fs.listBuckets(listArgs);
+      return OzoneRestUtils
+          .getResponse(args, HTTP_OK, bucketList.toJsonString());
+    } catch (IOException ex) {
+      LOG.debug("unable to get the bucket list for the specified volume.", ex);
+      OzoneException exp =
+          ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+      exp.setMessage("unable to get the bucket list for the specified volume.");
+      throw exp;
+    }
+  }
+}
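
As a hedged illustration of the template pattern above (a sketch only, not part of this patch; the method name infoVolumeExample is invented), a new volume call would typically take the following shape: handleCall performs the shared validation and MDC setup, and the anonymous doProcess carries only the call-specific work.

    // Hypothetical sketch only, not part of this patch. It assumes the same
    // imports already used by VolumeHandler in this patch (Response, Request,
    // UriInfo, HttpHeaders, IOException, OzoneException).
    public Response infoVolumeExample(String volume, Request req,
                                      UriInfo uriInfo, HttpHeaders headers)
        throws OzoneException {
      return new VolumeProcessTemplate() {
        @Override
        public Response doProcess(VolumeArgs args)
            throws IOException, OzoneException {
          // handleCall has already validated the volume name, version header
          // and date, and populated the MDC; only call-specific work is here.
          return getVolumeInfoResponse(args);
        }
      }.handleCall(volume, req, uriInfo, headers);
    }
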
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java
new file mode 100644
index 0000000..4d34c2d
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/handlers/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.handlers;
+
+/**
+ This package contains the Ozone web request handlers.
+ */
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Accounting.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Accounting.java
new file mode 100644
index 0000000..f03276c
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Accounting.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+/**
+ * This is the accounting interface; the Ozone REST interface will call into
+ * this interface whenever a put or delete of a key happens.
+ * <p>
+ * TODO : Technically we need to report bucket creation and deletion too
+ * since the bucket names and metadata consume storage.
+ * <p>
+ * TODO : We should separate out reporting of metadata and data.
+ * <p>
+ * In some cases end users will only want to account for the data they are
+ * storing since metadata is mostly a cost of business.
+ */
+public interface Accounting {
+  /**
+   * This call is made whenever a put key call is made.
+   * <p>
+   * In the case of a put that overwrites an existing key, the accounting
+   * system will see two calls: a removeBytes call followed by an addBytes
+   * call.
+   *
+   * @param owner  - Volume Owner
+   * @param volume - Name of the Volume
+   * @param bucket - Name of the bucket
+   * @param bytes  - How many bytes are put
+   */
+  void addBytes(String owner, String volume, String bucket, int bytes);
+
+  /**
+   * This call is made whenever a delete call is made.
+   *
+   * @param owner  - Volume Owner
+   * @param volume - Name of the Volume
+   * @param bucket - Name of the bucket
+   * @param bytes  - How many bytes are deleted
+   */
+  void removeBytes(String owner, String volume, String bucket, int bytes);
+
+}
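
As a hedged sketch of the contract described above (not part of this patch; the class name InMemoryAccounting is invented), an implementation could keep a per owner/volume/bucket counter. An overwrite then shows up as a removeBytes call followed by an addBytes call, so the running total stays correct.

    // Hypothetical sketch only, not part of this patch.
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicLong;

    public class InMemoryAccounting implements Accounting {
      // usage in bytes, keyed by owner/volume/bucket
      private final ConcurrentHashMap<String, AtomicLong> usage =
          new ConcurrentHashMap<>();

      private String key(String owner, String volume, String bucket) {
        return owner + "/" + volume + "/" + bucket;
      }

      @Override
      public void addBytes(String owner, String volume, String bucket,
          int bytes) {
        usage.computeIfAbsent(key(owner, volume, bucket),
            k -> new AtomicLong()).addAndGet(bytes);
      }

      @Override
      public void removeBytes(String owner, String volume, String bucket,
          int bytes) {
        usage.computeIfAbsent(key(owner, volume, bucket),
            k -> new AtomicLong()).addAndGet(-bytes);
      }
    }
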
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Bucket.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Bucket.java
new file mode 100644
index 0000000..4c8b85b
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Bucket.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * Bucket Interface acts as the HTTP entry point for
+ * bucket related functionality.
+ */
+@Path("/{volume}/{bucket}")
+@Api(tags = "bucket")
+public interface Bucket {
+  /**
+   * createBucket call handles the POST request for Creating a Bucket.
+   *
+   * @param volume - Volume name
+   * @param bucket - Bucket Name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @POST
+  @ApiOperation("Create new bucket to a volume")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response createBucket(@PathParam("volume") String volume,
+                        @PathParam("bucket") String bucket,
+                        @Context Request req, @Context UriInfo info,
+                        @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * updateBucket call handles the PUT request for updating a Bucket.
+   *
+   * @param volume - Volume name
+   * @param bucket - Bucket name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @PUT
+  @ApiOperation("Update bucket")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response updateBucket(@PathParam("volume") String volume,
+                        @PathParam("bucket") String bucket,
+                        @Context Request req, @Context UriInfo info,
+                        @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Deletes an empty bucket.
+   *
+   * @param volume Volume name
+   * @param bucket Bucket Name
+   * @param req - Http request
+   * @param info - Uri Info
+   * @param headers - Http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @DELETE
+  @ApiOperation("Deletes an empty bucket.")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response deleteBucket(@PathParam("volume") String volume,
+                        @PathParam("bucket") String bucket,
+                        @Context Request req, @Context UriInfo info,
+                        @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * listBucket lists the contents of a bucket.
+   *
+   * @param volume - Storage Volume Name
+   * @param bucket - Bucket Name
+   * @param info - Information type needed
+   * @param prefix - Prefix for the keys to be fetched
+   * @param maxKeys - MaxNumber of Keys to Return
+   * @param prevKey - Continuation Token
+   * @param req - Http request
+   * @param headers - Http headers
+   *
+   * @return - Json Body
+   *
+   * @throws OzoneException
+   */
+
+  @GET
+  @Produces(MediaType.APPLICATION_JSON)
+  @ApiOperation("List contents of a bucket")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response listBucket(@PathParam("volume") String volume,
+                      @PathParam("bucket") String bucket,
+                      @DefaultValue(Header.OZONE_INFO_QUERY_KEY)
+                      @QueryParam(Header.OZONE_INFO_QUERY_TAG)
+                      String info,
+                      @QueryParam(Header.OZONE_LIST_QUERY_PREFIX)
+                      String prefix,
+                      @DefaultValue(Header.OZONE_DEFAULT_LIST_SIZE)
+                      @QueryParam(Header.OZONE_LIST_QUERY_MAXKEYS)
+                      int maxKeys,
+                      @QueryParam(Header.OZONE_LIST_QUERY_PREVKEY)
+                      String prevKey,
+                      @Context Request req, @Context UriInfo uriInfo,
+                      @Context HttpHeaders headers) throws OzoneException;
+
+
+}
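
As a hedged client-side sketch (not part of this patch), the endpoint shape /{volume}/{bucket} and the four headers listed in the Swagger annotations above are enough to drive a bucket create. The host, port, volume, bucket and credential values below are placeholders.

    // Hypothetical client sketch only, not part of this patch.
    import java.net.HttpURLConnection;
    import java.net.URL;

    public final class CreateBucketExample {
      public static void main(String[] args) throws Exception {
        // Placeholder host/port and names; adjust for a real deployment.
        URL url = new URL("http://localhost:9880/volume-one/bucket-one");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");  // createBucket is mapped to POST
        conn.setRequestProperty("x-ozone-version", "v1");
        conn.setRequestProperty("x-ozone-user", "user");
        conn.setRequestProperty("Date", "Mon, 26 Jun 2017 04:23:30 GMT");
        conn.setRequestProperty("Authorization", "OZONE");
        System.out.println("HTTP status: " + conn.getResponseCode());
        conn.disconnect();
      }
    }
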
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Keys.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Keys.java
new file mode 100644
index 0000000..f9255f2
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Keys.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.io.InputStream;
+
+/**
+ * This interface defines operations permitted on a key.
+ */
+@Path("/{volume}/{bucket}/{keys:.*}")
+@Api(tags = "key")
+public interface Keys {
+
+  /**
+   * Adds a key to an existing bucket. If the object already exists,
+   * this call will overwrite it, or add it with a new version number if
+   * bucket versioning is turned on.
+   *
+   * @param volume Storage Volume Name
+   * @param bucket Name of the bucket
+   * @param keys Name of the Object
+   * @param is InputStream or File Data
+   * @param req Request
+   * @param headers http headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @PUT
+  @Consumes(MediaType.WILDCARD)
+  @ApiOperation(value = "Adds a key to an existing bucket.", notes = "If the "
+      + "object already exists this call will overwrite or add with new version"
+      + " number if the bucket versioning is turned on.")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response putKey(@PathParam("volume") String volume,
+      @PathParam("bucket") String bucket, @PathParam("keys") String keys,
+      InputStream is, @Context Request req, @Context UriInfo info,
+      @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Gets the Key if it exists.
+   *
+   * @param volume Storage Volume
+   * @param bucket Name of the bucket
+   * @param keys Object Name
+   * @param info Tag info
+   * @param req Request
+   * @param uriInfo Uri info
+   * @param headers Http Header
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @GET
+  @ApiOperation("Gets the Key if it exists.")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response getKey(@PathParam("volume") String volume,
+      @PathParam("bucket") String bucket, @PathParam("keys") String keys,
+      @QueryParam(Header.OZONE_INFO_QUERY_TAG) String info,
+      @Context Request req, @Context UriInfo uriInfo,
+      @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param volume Storage Volume Name
+   * @param bucket Name of the bucket
+   * @param keys Name of the Object
+   * @param req http Request
+   * @param headers HttpHeaders
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @DELETE
+  @ApiOperation("Deletes an existing key")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response deleteKey(@PathParam("volume") String volume,
+      @PathParam("bucket") String bucket, @PathParam("keys") String keys,
+      @Context Request req, @Context UriInfo info, @Context HttpHeaders headers)
+      throws OzoneException;
+}
+
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/StorageHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/StorageHandler.java
new file mode 100644
index 0000000..6336c90
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/StorageHandler.java
@@ -0,0 +1,295 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * The Storage handler interface is the interface between the
+ * REST protocol and the file system.
+ *
+ * We will have two default implementations of this interface:
+ * one for the local file system, which is handy while testing,
+ * and another which will point to the HDFS backend.
+ */
+@InterfaceAudience.Private
+public interface StorageHandler extends Closeable {
+
+  /**
+   * Creates a Storage Volume.
+   *
+   * @param args - Volume Name
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  void createVolume(VolumeArgs args) throws IOException, OzoneException;
+
+
+  /**
+   * setVolumeOwner - sets the owner of the volume.
+   *
+   * @param args owner info is present in the args
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  void setVolumeOwner(VolumeArgs args) throws IOException, OzoneException;
+
+
+  /**
+   * Set Volume Quota.
+   *
+   * @param args - Has Quota info
+   * @param remove - true if the request is to remove the quota
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  void setVolumeQuota(VolumeArgs args, boolean remove)
+      throws IOException, OzoneException;
+
+  /**
+   * Checks if a Volume exists and the user with a role specified has access
+   * to the Volume.
+   *
+   * @param volume - Volume Name whose access permissions needs to be checked
+   * @param acl - requested acls which needs to be checked for access
+   *
+   * @return - Boolean - True if the user with a role can access the volume.
+   * This is possible for owners of the volume and admin users
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  boolean checkVolumeAccess(String volume, OzoneAcl acl)
+      throws IOException, OzoneException;
+
+
+  /**
+   * Returns the List of Volumes owned by the specific user.
+   *
+   * @param args - ListArgs
+   *
+   * @return - List of Volumes
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  ListVolumes listVolumes(ListArgs args) throws IOException, OzoneException;
+
+  /**
+   * Deletes an Empty Volume.
+   *
+   * @param args - Volume Args
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  void deleteVolume(VolumeArgs args) throws IOException, OzoneException;
+
+
+  /**
+   * Returns Info about the specified Volume.
+   *
+   * @param args - Volume Args
+   *
+   * @return VolumeInfo
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  VolumeInfo getVolumeInfo(VolumeArgs args) throws IOException, OzoneException;
+
+  /**
+   * Creates a Bucket in specified Volume.
+   *
+   * @param args BucketArgs- BucketName, UserName and Acls
+   *
+   * @throws IOException
+   */
+  void createBucket(BucketArgs args) throws IOException, OzoneException;
+
+  /**
+   * Adds or Removes ACLs from a Bucket.
+   *
+   * @param args - BucketArgs
+   *
+   * @throws IOException
+   */
+  void setBucketAcls(BucketArgs args) throws IOException, OzoneException;
+
+  /**
+   * Enables or disables Bucket Versioning.
+   *
+   * @param args - BucketArgs
+   *
+   * @throws IOException
+   */
+  void setBucketVersioning(BucketArgs args) throws IOException, OzoneException;
+
+  /**
+   * Sets the Storage Class of a Bucket.
+   *
+   * @param args - BucketArgs
+   *
+   * @throws IOException
+   */
+  void setBucketStorageClass(BucketArgs args)
+      throws IOException, OzoneException;
+
+  /**
+   * Deletes a bucket if it is empty.
+   *
+   * @param args Bucket args structure
+   *
+   * @throws IOException
+   */
+  void deleteBucket(BucketArgs args) throws IOException, OzoneException;
+
+  /**
+   * Checks that the bucket exists and the user has read access
+   * to the bucket; otherwise throws an exception.
+   *
+   * @param args Bucket args structure
+   *
+   * @throws IOException
+   */
+  void checkBucketAccess(BucketArgs args) throws IOException, OzoneException;
+
+
+  /**
+   * Returns all Buckets of a specified Volume.
+   *
+   * @param listArgs -- List Args.
+   *
+   * @return ListBuckets
+   *
+   * @throws OzoneException
+   */
+  ListBuckets listBuckets(ListArgs listArgs) throws
+      IOException, OzoneException;
+
+
+  /**
+   * Returns Bucket's Metadata as a String.
+   *
+   * @param args Bucket args structure
+   *
+   * @return Info about the bucket
+   *
+   * @throws IOException
+   */
+  BucketInfo getBucketInfo(BucketArgs args) throws IOException, OzoneException;
+
+  /**
+   * Writes a key in an existing bucket.
+   *
+   * @param args KeyArgs
+   *
+   * @return OutputStream
+   *
+   * @throws OzoneException
+   */
+  OutputStream newKeyWriter(KeyArgs args)
+      throws IOException, OzoneException;
+
+
+  /**
+   * Tells the file system that the object has been written out
+   * completely and it can do any housekeeping operations that need
+   * to be done.
+   *
+   * @param args Key Args
+   * @param stream - the OutputStream for the key being committed
+   *
+   * @throws IOException
+   */
+  void commitKey(KeyArgs args, OutputStream stream)
+      throws IOException, OzoneException;
+
+
+  /**
+   * Reads a key from an existing bucket.
+   *
+   * @param args KeyArgs
+   *
+   * @return LengthInputStream
+   *
+   * @throws IOException
+   */
+  LengthInputStream newKeyReader(KeyArgs args)
+      throws IOException, OzoneException;
+
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param args KeyArgs
+   *
+   * @throws OzoneException
+   */
+  void deleteKey(KeyArgs args) throws IOException, OzoneException;
+
+
+  /**
+   * Returns a list of Keys.
+   *
+   * @param args KeyArgs
+   *
+   * @return ListKeys
+   *
+   * @throws IOException
+   */
+  ListKeys listKeys(ListArgs args) throws IOException, OzoneException;
+
+  /**
+   * Get information of the specified Key.
+   *
+   * @param args Key Args
+   *
+   * @return KeyInfo
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  KeyInfo getKeyInfo(KeyArgs args) throws IOException, OzoneException;
+
+  /**
+   * Closes all the opened resources.
+   */
+  void close();
+}
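
As a hedged usage sketch (not part of this patch), the write path implied by the interface above is: open an OutputStream with newKeyWriter, stream the bytes, then call commitKey so the backend can do its housekeeping. How the KeyArgs instance is built is outside this sketch.

    // Hypothetical helper sketch only, not part of this patch; it uses only
    // methods declared on the StorageHandler interface above.
    import java.io.IOException;
    import java.io.OutputStream;

    import org.apache.hadoop.ozone.client.rest.OzoneException;
    import org.apache.hadoop.ozone.web.handlers.KeyArgs;
    import org.apache.hadoop.ozone.web.interfaces.StorageHandler;

    public final class KeyWriteExample {
      /** Writes data for the key described by args and commits it. */
      public static void writeKey(StorageHandler fs, KeyArgs args, byte[] data)
          throws IOException, OzoneException {
        OutputStream out = fs.newKeyWriter(args);  // open the key for writing
        out.write(data);                           // stream the payload
        fs.commitKey(args, out);                   // signal the write is complete
      }
    }
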
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/UserAuth.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/UserAuth.java
new file mode 100644
index 0000000..a1d2e7c
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/UserAuth.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+
+/**
+ * This interface is used by Ozone to determine user identity.
+ *
+ * Please see the concrete implementations for more information.
+ */
+@InterfaceAudience.Private
+public interface UserAuth {
+  /**
+   * Returns the user name as a string from the URI and HTTP headers.
+   *
+   * @param userArgs - userArgs
+   *
+   * @return String - User name
+   *
+   * @throws OzoneException
+   */
+  String getUser(UserArgs userArgs) throws OzoneException;
+
+  /**
+   * Returns all the Groups that user is a member of.
+   *
+   * @param userArgs - userArgs
+   *
+   * @return Array of Groups
+   *
+   * @throws OzoneException
+   */
+  String[] getGroups(UserArgs userArgs) throws OzoneException;
+
+  /**
+   * Returns true if a user is an Admin.
+   *
+   * @param userArgs - userArgs
+   *
+   * @return true if Admin , false otherwise
+   *
+   * @throws OzoneException - Allows the underlying system to throw; that
+   * error will get propagated to clients
+   */
+  boolean isAdmin(UserArgs userArgs) throws OzoneException;
+
+  /**
+   * Returns true if the request is Anonymous.
+   *
+   * @param userArgs - userArgs
+   *
+   * @return true if the request is anonymous, false otherwise.
+   *
+   * @throws OzoneException - Will be propagated back to end user
+   */
+  boolean isAnonymous(UserArgs userArgs) throws OzoneException;
+
+  /**
+   * Returns true if the name is a recognizable user in the system.
+   *
+   * @param userName - User Name to check
+   * @param userArgs - userArgs
+   *
+   * @return true if the username string is the name of a valid user.
+   *
+   * @throws OzoneException - Will be propagated back to end user
+   */
+  boolean isUser(String userName, UserArgs userArgs) throws OzoneException;
+
+  /**
+   * Returns the x-ozone-user, that is, the user on whose behalf the call is
+   * made. This is used in the Volume creation path.
+   *
+   * @param userArgs - userArgs
+   *
+   * @return a user name if it has x-ozone-user args in header.
+   *
+   * @throws OzoneException
+   */
+  String getOzoneUser(UserArgs userArgs) throws OzoneException;
+
+}
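
As a hedged toy implementation of the contract above (not part of this patch; every answer is hard-coded and the class name is invented), a UserAuth provider has the following shape. Real implementations are obtained through UserHandlerBuilder.getAuthHandler() in the handlers.

    // Hypothetical toy implementation only, not part of this patch.
    import org.apache.hadoop.ozone.client.rest.OzoneException;
    import org.apache.hadoop.ozone.web.handlers.UserArgs;
    import org.apache.hadoop.ozone.web.interfaces.UserAuth;

    public class FixedUserAuth implements UserAuth {
      @Override
      public String getUser(UserArgs userArgs) throws OzoneException {
        return "example-user";                 // fixed identity (toy only)
      }

      @Override
      public String[] getGroups(UserArgs userArgs) throws OzoneException {
        return new String[] {"example-group"}; // fixed group list (toy only)
      }

      @Override
      public boolean isAdmin(UserArgs userArgs) throws OzoneException {
        return true;                           // treat every caller as admin
      }

      @Override
      public boolean isAnonymous(UserArgs userArgs) throws OzoneException {
        return false;
      }

      @Override
      public boolean isUser(String userName, UserArgs userArgs)
          throws OzoneException {
        return true;                           // accept any user name
      }

      @Override
      public String getOzoneUser(UserArgs userArgs) throws OzoneException {
        return "example-user";                 // same as getUser for this toy
      }
    }
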
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Volume.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Volume.java
new file mode 100644
index 0000000..85b2240
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/Volume.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.interfaces;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Request;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * Volume Interface acts as the HTTP entry point for
+ * volume related functionality.
+ */
+@InterfaceAudience.Private
+@Path("/{volume}")
+@Api(tags = "volume")
+public interface Volume {
+
+  /**
+   * Creates a Volume owned by the user.
+   *
+   * Params :
+   * Quota - Specifies the maximum size usable by the user.
+   * The valid values for quota are <int>(<BYTES|MB|GB|TB>) | remove,
+   * for example 10GB or "remove".
+   *
+   * @param volume Volume Name, this has to be unique at Ozone Level
+   * @param quota Quota for this Storage Volume - <int>(<MB|GB|TB>) | remove
+   * @param req - Request Object
+   * @param uriInfo - Http UriInfo
+   * @param headers - Http Headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+
+  @POST
+  @ApiOperation("Creates a Volume owned by the user")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response createVolume(@PathParam("volume") String volume,
+      @DefaultValue(Header.OZONE_QUOTA_UNDEFINED)
+      @QueryParam(Header.OZONE_QUOTA_QUERY_TAG) String quota,
+      @Context Request req, @Context UriInfo uriInfo,
+      @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Updates a Volume owned by the user.
+   *
+   * Params :
+   * Owner - Specifies the name of the owner.
+   * Quota - Specifies the maximum size usable by the user.
+   * The valid values for quota are <int>(<MB|GB|TB>) | remove,
+   * for example 10GB or "remove".
+   *
+   * @param volume Volume Name, this has to be unique at Ozone Level
+   * @param quota Quota for this Storage Volume - <int>(<MB|GB|TB>) | remove
+   * @param req - Request Object
+   * @param headers - Http Headers
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @PUT
+  @ApiOperation("Updates a Volume owned by the user")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response updateVolume(@PathParam("volume") String volume,
+      @DefaultValue(Header.OZONE_QUOTA_UNDEFINED)
+      @QueryParam(Header.OZONE_QUOTA_QUERY_TAG) String quota,
+      @Context Request req, @Context UriInfo uriInfo,
+      @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Deletes a Volume if it is empty.
+   *
+   * @param volume Storage Volume Name
+   *
+   * @return Response
+   *
+   * @throws OzoneException
+   */
+  @DELETE
+  @ApiOperation("Deletes a Volume if it is empty")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response deleteVolume(@PathParam("volume") String volume,
+      @Context Request req, @Context UriInfo uriInfo,
+      @Context HttpHeaders headers) throws OzoneException;
+
+  /**
+   * Returns Volume info. This API can be invoked either
+   * by admin or the owner.
+   *
+   * @param volume - Storage Volume Name
+   * @param req - Http Req
+   * @param headers - Http headers
+   *
+   * @return - Response
+   *
+   * @throws OzoneException
+   */
+  @GET
+  @ApiOperation(value = "Returns Volume info", notes = "This API can be "
+      + "invoked either by admin or the owner")
+  @ApiImplicitParams({
+      @ApiImplicitParam(name = "x-ozone-version", example = "v1", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "x-ozone-user", example = "user", required =
+          true, paramType = "header"),
+      @ApiImplicitParam(name = "Date", example = "Date: Mon, 26 Jun 2017 "
+          + "04:23:30 GMT", required = true, paramType = "header"),
+      @ApiImplicitParam(name = "Authorization", example = "OZONE", required =
+          true, paramType = "header")})
+  Response getVolumeInfo(@PathParam("volume") String volume,
+      @DefaultValue(Header.OZONE_INFO_QUERY_BUCKET)
+      @QueryParam(Header.OZONE_INFO_QUERY_TAG) String info,
+      @QueryParam(Header.OZONE_LIST_QUERY_PREFIX) String prefix,
+      @DefaultValue(Header.OZONE_DEFAULT_LIST_SIZE)
+      @QueryParam(Header.OZONE_LIST_QUERY_MAXKEYS) int keys,
+      @QueryParam(Header.OZONE_LIST_QUERY_PREVKEY) String prevKey,
+      @QueryParam(Header.OZONE_LIST_QUERY_ROOTSCAN) boolean rootScan,
+      @Context Request req, @Context UriInfo uriInfo,
+      @Context HttpHeaders headers) throws OzoneException;
+
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/package-info.java
new file mode 100644
index 0000000..940f179
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/interfaces/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.interfaces;
+
+/**
+ This package contains the Ozone web interfaces.
+ */
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/LocalStorageHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/LocalStorageHandler.java
new file mode 100644
index 0000000..9747eac
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/LocalStorageHandler.java
@@ -0,0 +1,379 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.localstorage;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * PLEASE NOTE: This class is a dummy backend for test and prototyping purposes
+ * only. It does not handle object semantics correctly, nor does it take care
+ * of security.
+ */
+@InterfaceAudience.Private
+public class LocalStorageHandler implements StorageHandler {
+  private final Configuration conf;
+
+  /**
+   * Constructs LocalStorageHandler.
+   *
+   * @param conf ozone conf.
+   */
+  public LocalStorageHandler(Configuration conf) {
+    this.conf = conf;
+  }
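+
+  // A minimal usage sketch of this handler (illustrative only: volumeArgs,
+  // keyArgs and data are placeholder variables, and exception handling and
+  // stream closing are omitted):
+  //
+  //   StorageHandler storage = new LocalStorageHandler(conf);
+  //   storage.createVolume(volumeArgs);
+  //   OutputStream out = storage.newKeyWriter(keyArgs);
+  //   out.write(data);
+  //   storage.commitKey(keyArgs, out);
+  //   storage.close();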
+
+  /**
+   * Creates Storage Volume.
+   *
+   * @param args - volumeArgs
+   * @throws IOException
+   */
+  @Override
+  public void createVolume(VolumeArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.createVolume(args);
+
+  }
+
+  /**
+   * setVolumeOwner - sets the owner of the volume.
+   *
+   * @param args volumeArgs
+   * @throws IOException
+   */
+  @Override
+  public void setVolumeOwner(VolumeArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.setVolumeProperty(args, OzoneMetadataManager.VolumeProperty.OWNER);
+  }
+
+  /**
+   * Set Volume Quota Info.
+   *
+   * @param args   - volumeArgs
+   * @param remove - true if the request is to remove the quota
+   * @throws IOException
+   */
+  @Override
+  public void setVolumeQuota(VolumeArgs args, boolean remove)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+
+    if (remove) {
+      OzoneQuota quota = new OzoneQuota();
+      args.setQuota(quota);
+    }
+    oz.setVolumeProperty(args, OzoneMetadataManager.VolumeProperty.QUOTA);
+  }
+
+  /**
+   * Checks if a volume exists and whether the specified user has access to it.
+   *
+   * @param volume - Volume Name
+   * @param acl - Ozone acl which needs to be compared for access
+   * @return - Boolean - True if the user can modify the volume. This is
+   * possible for owners of the volume and admin users
+   * @throws IOException
+   */
+  @Override
+  public boolean checkVolumeAccess(String volume, OzoneAcl acl)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.checkVolumeAccess(volume, acl);
+  }
+
+  /**
+   * Returns Info about the specified Volume.
+   *
+   * @param args - volumeArgs
+   * @return VolumeInfo
+   * @throws IOException
+   */
+  @Override
+  public VolumeInfo getVolumeInfo(VolumeArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.getVolumeInfo(args);
+  }
+
+  /**
+   * Deletes an Empty Volume.
+   *
+   * @param args - Volume Args
+   * @throws IOException
+   */
+  @Override
+  public void deleteVolume(VolumeArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.deleteVolume(args);
+
+  }
+
+  /**
+   * Returns the List of Volumes owned by the specific user.
+   *
+   * @param args - ListArgs
+   * @return - List of Volumes
+   * @throws IOException
+   */
+  @Override
+  public ListVolumes listVolumes(ListArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.listVolumes(args);
+  }
+
+  /**
+   * Succeeds if the bucket exists and the user has read access to the
+   * bucket; otherwise throws an exception.
+   *
+   * @param args Bucket args structure
+   * @throws IOException
+   */
+  @Override
+  public void checkBucketAccess(BucketArgs args)
+      throws IOException, OzoneException {
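+    // No-op in the local storage handler: this test/prototype backend does
+    // not enforce bucket access checks.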
+
+  }
+
+  /**
+   * Creates a Bucket in the specified Volume.
+   *
+   * @param args BucketArgs - BucketName, UserName and ACLs
+   * @throws IOException
+   */
+  @Override
+  public void createBucket(BucketArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.createBucket(args);
+  }
+
+  /**
+   * Adds or Removes ACLs from a Bucket.
+   *
+   * @param args - BucketArgs
+   * @throws IOException
+   */
+  @Override
+  public void setBucketAcls(BucketArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.setBucketProperty(args, OzoneMetadataManager.BucketProperty.ACLS);
+  }
+
+  /**
+   * Enables or disables Bucket Versioning.
+   *
+   * @param args - BucketArgs
+   * @throws IOException
+   */
+  @Override
+  public void setBucketVersioning(BucketArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.setBucketProperty(args, OzoneMetadataManager.BucketProperty.VERSIONING);
+
+  }
+
+  /**
+   * Sets the Storage Class of a Bucket.
+   *
+   * @param args - BucketArgs
+   * @throws IOException
+   */
+  @Override
+  public void setBucketStorageClass(BucketArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.setBucketProperty(args, OzoneMetadataManager.BucketProperty.STORAGETYPE);
+
+  }
+
+  /**
+   * Deletes a bucket if it is empty.
+   *
+   * @param args Bucket args structure
+   * @throws IOException
+   */
+  @Override
+  public void deleteBucket(BucketArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.deleteBucket(args);
+  }
+
+  /**
+   * Returns all Buckets of a specified Volume.
+   *
+   * @param args - ListArgs
+   * @return ListBuckets
+   * @throws OzoneException
+   */
+  @Override
+  public ListBuckets listBuckets(ListArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.listBuckets(args);
+  }
+
+  /**
+   * Returns the Bucket's metadata.
+   *
+   * @param args Bucket args structure
+   * @return Info about the bucket
+   * @throws IOException
+   */
+  @Override
+  public BucketInfo getBucketInfo(BucketArgs args)
+      throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.getBucketInfo(args);
+  }
+
+  /**
+   * Writes a key in an existing bucket.
+   *
+   * @param args KeyArgs
+   * @return OutputStream
+   * @throws OzoneException
+   */
+  @Override
+  public OutputStream newKeyWriter(KeyArgs args) throws IOException,
+      OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.createKey(args);
+  }
+
+  /**
+   * Tells the file system that the object has been written out completely and
+   * that it can do any housekeeping that needs to be done.
+   *
+   * @param args   Key Args
+   * @param stream - OutputStream returned earlier by newKeyWriter
+   * @throws IOException
+   */
+  @Override
+  public void commitKey(KeyArgs args, OutputStream stream) throws
+      IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.commitKey(args, stream);
+
+  }
+
+  /**
+   * Reads a key from an existing bucket.
+   *
+   * @param args KeyArgs
+   * @return LengthInputStream
+   * @throws IOException
+   */
+  @Override
+  public LengthInputStream newKeyReader(KeyArgs args) throws IOException,
+      OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.newKeyReader(args);
+  }
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param args KeyArgs
+   * @throws OzoneException
+   */
+  @Override
+  public void deleteKey(KeyArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    oz.deleteKey(args);
+  }
+
+  /**
+   * Returns a list of keys.
+   *
+   * @param args ListArgs
+   * @return ListKeys
+   * @throws IOException
+   */
+  @Override
+  public ListKeys listKeys(ListArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz =
+        OzoneMetadataManager.getOzoneMetadataManager(conf);
+    return oz.listKeys(args);
+
+  }
+
+  /**
+   * Get information of the specified Key.
+   *
+   * @param args Key Args
+   *
+   * @return KeyInfo
+   *
+   * @throws IOException
+   * @throws OzoneException
+   */
+  @Override
+  public KeyInfo getKeyInfo(KeyArgs args) throws IOException, OzoneException {
+    OzoneMetadataManager oz = OzoneMetadataManager
+        .getOzoneMetadataManager(conf);
+    return oz.getKeyInfo(args);
+  }
+
+  @Override
+  public void close() {
+    //No resource to close, do nothing.
+  }
+
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/OzoneMetadataManager.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/OzoneMetadataManager.java
new file mode 100644
index 0000000..1fe9a18
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/OzoneMetadataManager.java
@@ -0,0 +1,1138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.localstorage;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.response.VolumeOwner;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Locale;
+import java.util.TimeZone;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+/**
+ * A stand-alone Ozone implementation that allows us to run Ozone tests in
+ * local mode. This acts as the Ozone backend when using MiniDFSCluster for
+ * testing.
+ */
+public final class OzoneMetadataManager {
+
+  /*
+    OzoneMetadataManager manages volume/bucket/object metadata and
+    data.
+
+    Metadata is maintained in 2 level DB files, UserDB and MetadataDB.
+
+    UserDB maps a name to a list. For example, the volumes owned by the user
+    bilbo would be maintained in UserDB as {bilbo} -> {shire, rings}.
+
+    The list part of the mapping is context sensitive. That is, if you use
+    {user name} as the key, the list you get is a list of volumes. If you use
+    {user/volume} as the key, the list you get is a list of buckets. If you use
+    {user/volume/bucket} as the key, the list you get is the list of objects.
+
+    All keys in the UserDB start with the user name.
+
+    We also need to maintain a flat namespace for volumes. This is
+    maintained by the MetadataDB. MetadataDB contains the name of an
+    object(volume, bucket or key) and its associated metadata.
+    The keys in the Metadata DB are {volume}, {volume/bucket} or
+    {volume/bucket/key}. User name is absent, so we have a common root name
+    space for the volume.
+
+    The value part of metadataDB points to the corresponding *Info structures
+    (see the illustrative layout sketch below this comment):
+    {volume} -> volumeInfo
+    {volume/bucket} -> bucketInfo
+    {volume/bucket/key} -> keyInfo
+
+
+    Here are the various workflows:
+
+    CreateVolume -> Check if the volume exists in metadataDB; if not, update
+    UserDB with the user's list of volumes and update metadataDB with the
+    VolumeInfo.
+
+    DeleteVolume -> Check the volume and its VolumeInfo->bucketCount.
+    If bucketCount == 0, delete the volume from userDB->{list of volumes} and
+    from metadataDB.
+
+    Very similar workflows exist for CreateBucket and DeleteBucket.
+
+      // Please note : These database operations are *not* transactional,
+      // which means that failure can lead to inconsistencies.
+      // Only way to recover is to reset to a clean state, or
+      // use rm -rf /tmp/ozone :)
+
+    We have a very simple locking policy: a single ReadWriteLock, aptly named
+    "lock", is taken for each action.
+
+    All actions *must* be performed with the lock held, either as a read
+    lock or as a write lock. Violating this locking policy can be harmful.
+
+
+      // // IMPORTANT :
+      // //  This is a simulation layer; this is NOT how the real
+      // //  OZONE functions. It is written so that we can write
+      // //  stand-alone tests for the protocol and client code.
+
+*/
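+  /*
+    Illustrative sketch of the layout described in the comment above. The
+    names are made up and the exact key strings are produced by the *Args
+    helper classes:
+
+      UserDB:      {bilbo}                -> list of volumes, e.g. [shire]
+                   {bilbo/shire}          -> list of buckets, e.g. [baggins]
+                   {bilbo/shire/baggins}  -> list of keys,    e.g. [ring]
+
+      MetadataDB:  {shire}                -> VolumeInfo
+                   {shire/baggins}        -> BucketInfo
+                   {shire/baggins/ring}   -> KeyInfo
+  */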
+  static final Logger LOG = LoggerFactory.getLogger(OzoneMetadataManager.class);
+  private static final String USER_DB = "/user.db";
+  private static final String META_DB = "/metadata.db";
+  private static OzoneMetadataManager bm = null;
+  private MetadataStore userDB;
+  private MetadataStore metadataDB;
+  private ReadWriteLock lock;
+  private Charset encoding = Charset.forName("UTF-8");
+  private String storageRoot;
+  private static final String OBJECT_DIR = "/_objects/";
+
+  // This table keeps a pointer to objects whose operations
+  // are in progress but not yet committed to persistent store
+  private ConcurrentHashMap<OutputStream, String> inProgressObjects;
+
+  /**
+   * Constructs OzoneMetadataManager.
+   */
+  private OzoneMetadataManager(Configuration conf) throws IOException {
+
+    lock = new ReentrantReadWriteLock();
+    storageRoot =
+        conf.getTrimmed(OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT,
+            OzoneConfigKeys.OZONE_LOCALSTORAGE_ROOT_DEFAULT);
+
+    File file = new File(storageRoot + OBJECT_DIR);
+
+    if (!file.exists() && !file.mkdirs()) {
+      LOG.error("Creation of Ozone root failed. " + file.toString());
+      throw new IOException("Creation of Ozone root failed.");
+    }
+
+    try {
+      userDB = MetadataStoreBuilder.newBuilder()
+          .setDbFile(new File(storageRoot + USER_DB))
+          .setCreateIfMissing(true)
+          .build();
+      metadataDB = MetadataStoreBuilder.newBuilder()
+          .setDbFile(new File(storageRoot + META_DB))
+          .setCreateIfMissing(true)
+          .build();
+      inProgressObjects = new ConcurrentHashMap<>();
+    } catch (IOException ex) {
+      LOG.error("Cannot open db :" + ex.getMessage());
+      throw ex;
+    }
+  }
+
+  /**
+   * Gets the singleton OzoneMetadataManager instance.
+   *
+   * @param conf - Configuration
+   * @return OzoneMetadataManager
+   */
+  public static synchronized OzoneMetadataManager
+      getOzoneMetadataManager(Configuration conf) throws IOException {
+    if (bm == null) {
+      bm = new OzoneMetadataManager(conf);
+    }
+    return bm;
+  }
+
+  /**
+   * Creates a volume.
+   *
+   * @param args - VolumeArgs
+   * @throws OzoneException
+   */
+  public void createVolume(VolumeArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      SimpleDateFormat format =
+          new SimpleDateFormat(OzoneConsts.OZONE_DATE_FORMAT, Locale.US);
+      format.setTimeZone(TimeZone.getTimeZone(OzoneConsts.OZONE_TIME_ZONE));
+
+      byte[] volumeName =
+          metadataDB.get(args.getVolumeName().getBytes(encoding));
+
+      if (volumeName != null) {
+        LOG.debug("Volume {} already exists.", volumeName);
+        throw ErrorTable.newError(ErrorTable.VOLUME_ALREADY_EXISTS, args);
+      }
+
+      VolumeInfo newVInfo = new VolumeInfo(args.getVolumeName(), format
+          .format(new Date(System.currentTimeMillis())), args.getAdminName());
+
+      newVInfo.setQuota(args.getQuota());
+      VolumeOwner owner = new VolumeOwner(args.getUserName());
+      newVInfo.setOwner(owner);
+
+      ListVolumes volumeList;
+      byte[] userVolumes = userDB.get(args.getUserName().getBytes(encoding));
+      if (userVolumes == null) {
+        volumeList = new ListVolumes();
+      } else {
+        volumeList = ListVolumes.parse(new String(userVolumes, encoding));
+      }
+
+      volumeList.addVolume(newVInfo);
+      volumeList.sort();
+
+      // Please note : These database operations are *not* transactional,
+      // which means that failure can lead to inconsistencies.
+      // Only way to recover is to reset to a clean state, or
+      // use rm -rf /tmp/ozone :)
+
+
+      userDB.put(args.getUserName().getBytes(encoding),
+          volumeList.toDBString().getBytes(encoding));
+
+      metadataDB.put(args.getVolumeName().getBytes(encoding),
+          newVInfo.toDBString().getBytes(encoding));
+
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Updates volume properties such as the owner name and quota.
+   *
+   * @param args     - Volume Args
+   * @param property - Flag which tells us what property to update
+   * @throws OzoneException
+   */
+  public void setVolumeProperty(VolumeArgs args, VolumeProperty property)
+      throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      byte[] volumeInfo =
+          metadataDB.get(args.getVolumeName().getBytes(encoding));
+      if (volumeInfo == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, args);
+      }
+      VolumeInfo info = VolumeInfo.parse(new String(volumeInfo, encoding));
+
+      byte[] userBytes = userDB.get(args.getResourceName().getBytes(encoding));
+      ListVolumes volumeList;
+      if (userBytes == null) {
+        volumeList = new ListVolumes();
+      } else {
+        volumeList = ListVolumes.parse(new String(userBytes, encoding));
+      }
+
+      switch (property) {
+      case OWNER:
+        // needs new owner, we delete the volume object from the
+        // old user's volume list
+        removeOldOwner(info);
+        VolumeOwner owner = new VolumeOwner(args.getUserName());
+        // set the new owner
+        info.setOwner(owner);
+        break;
+      case QUOTA:
+        // if this is quota update we just remove the old object from the
+        // current users list and update the same object later.
+        volumeList.getVolumes().remove(info);
+        info.setQuota(args.getQuota());
+        break;
+      default:
+        OzoneException ozEx =
+            ErrorTable.newError(ErrorTable.BAD_PROPERTY, args);
+        ozEx.setMessage("Volume property is not recognized");
+        throw ozEx;
+      }
+
+      volumeList.addVolume(info);
+
+      metadataDB.put(args.getVolumeName().getBytes(encoding),
+          info.toDBString().getBytes(encoding));
+
+      // if this is an owner change this put will create a new owner or update
+      // the owner's volume list.
+      userDB.put(args.getResourceName().getBytes(encoding),
+          volumeList.toDBString().getBytes(encoding));
+
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Removes the old owner from the volume.
+   *
+   * @param info - VolumeInfo
+   * @throws IOException
+   */
+  private void removeOldOwner(VolumeInfo info) throws IOException {
+    // We need to look up the current owner of the volume
+    byte[] volumeBytes =
+        userDB.get(info.getOwner().getName().getBytes(encoding));
+    ListVolumes volumeList =
+        ListVolumes.parse(new String(volumeBytes, encoding));
+    volumeList.getVolumes().remove(info);
+
+    // Write the new list info to the old user data
+    userDB.put(info.getOwner().getName().getBytes(encoding),
+        volumeList.toDBString().getBytes(encoding));
+  }
+
+  /**
+   * Checks if you are the owner of a specific volume.
+   *
+   * @param volume - Volume name whose access permissions need to be checked
+   * @param acl - requested ACL which needs to be checked for access
+   * @return - True if you are the owner, false otherwise
+   * @throws OzoneException
+   */
+  public boolean checkVolumeAccess(String volume, OzoneAcl acl)
+      throws OzoneException {
+    lock.readLock().lock();
+    try {
+      byte[] volumeInfo =
+          metadataDB.get(volume.getBytes(encoding));
+      if (volumeInfo == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, null);
+      }
+
+      VolumeInfo info = VolumeInfo.parse(new String(volumeInfo, encoding));
+      return info.getOwner().getName().equals(acl.getName());
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, null, ex);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * getVolumeInfo returns the Volume Info of a specific volume.
+   *
+   * @param args - Volume args
+   * @return VolumeInfo
+   * @throws OzoneException
+   */
+  public VolumeInfo getVolumeInfo(VolumeArgs args) throws OzoneException {
+    lock.readLock().lock();
+    try {
+      byte[] volumeInfo =
+          metadataDB.get(args.getVolumeName().getBytes(encoding));
+      if (volumeInfo == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, args);
+      }
+
+      return VolumeInfo.parse(new String(volumeInfo, encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Returns all the volumes owned by a specific user.
+   *
+   * @param args - ListArgs
+   * @return - ListVolumes
+   * @throws OzoneException
+   */
+  public ListVolumes listVolumes(ListArgs args) throws OzoneException {
+    lock.readLock().lock();
+    try {
+      if (args.isRootScan()) {
+        return listAllVolumes(args);
+      }
+
+      UserArgs uArgs = (UserArgs) args.getArgs();
+      byte[] volumeList = userDB.get(uArgs.getUserName().getBytes(encoding));
+      if (volumeList == null) {
+        throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, uArgs);
+      }
+
+      String prefix = args.getPrefix();
+      int maxCount = args.getMaxKeys();
+      String prevKey = args.getPrevKey();
+      if (prevKey != null) {
+        // Format is username/volumeName, in local mode we don't use the
+        // user name since we have a userName DB.
+        String[] volName = args.getPrevKey().split("/");
+        if (volName.length < 2) {
+          throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, uArgs);
+        }
+        prevKey = volName[1];
+      }
+      return getFilteredVolumes(volumeList, prefix, prevKey, maxCount);
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args.getArgs(), ex);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Returns a List of Volumes that meet the prefix, prevkey and maxCount
+   * constraints.
+   *
+   * @param volumeList - Byte Array of Volume Info.
+   * @param prefix     - prefix string.
+   * @param prevKey    - PrevKey
+   * @param maxCount   - Maximum Count.
+   * @return ListVolumes.
+   * @throws IOException
+   */
+  private ListVolumes getFilteredVolumes(byte[] volumeList, String prefix,
+                                         String prevKey, int maxCount) throws
+      IOException {
+    ListVolumes volumes = ListVolumes.parse(new String(volumeList,
+        encoding));
+    int currentCount = 0;
+    ListIterator<VolumeInfo> iter = volumes.getVolumes().listIterator();
+    ListVolumes filteredVolumes = new ListVolumes();
+    while (currentCount < maxCount && iter.hasNext()) {
+      VolumeInfo vInfo = iter.next();
+      if (isMatchingPrefix(prefix, vInfo) && isAfterKey(prevKey, vInfo)) {
+        filteredVolumes.addVolume(vInfo);
+        currentCount++;
+      }
+    }
+    return filteredVolumes;
+  }
+
+  /**
+   * Returns all volumes in a cluster.
+   *
+   * @param args - ListArgs.
+   * @return ListVolumes.
+   * @throws OzoneException
+   */
+  public ListVolumes listAllVolumes(ListArgs args)
+      throws OzoneException, IOException {
+    String prefix = args.getPrefix();
+    final String prevKey;
+    int maxCount = args.getMaxKeys();
+    String userName = null;
+
+    if (args.getPrevKey() != null) {
+      // Format is username/volumeName
+      String[] volName = args.getPrevKey().split("/");
+      if (volName.length < 2) {
+        throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args.getArgs());
+      }
+
+      byte[] userNameBytes = userDB.get(volName[0].getBytes(encoding));
+      userName = new String(userNameBytes, encoding);
+      prevKey = volName[1];
+    } else {
+      userName = new String(userDB.peekAround(0, null).getKey(), encoding);
+      prevKey = null;
+    }
+
+    if (userName == null || userName.isEmpty()) {
+      throw ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args.getArgs());
+    }
+
+    ListVolumes returnSet = new ListVolumes();
+    // we need to iterate through users until we get maxcount volumes
+    // or no more volumes are left.
+    userDB.iterate(null, (key, value) -> {
+      int currentSize = returnSet.getVolumes().size();
+      if (currentSize < maxCount) {
+        String name = new String(key, encoding);
+        byte[] volumeList = userDB.get(name.getBytes(encoding));
+        if (volumeList == null) {
+          throw new IOException(
+              ErrorTable.newError(ErrorTable.USER_NOT_FOUND, args.getArgs()));
+        }
+        returnSet.getVolumes().addAll(
+            getFilteredVolumes(volumeList, prefix, prevKey,
+                maxCount - currentSize).getVolumes());
+        return true;
+      } else {
+        return false;
+      }
+    });
+
+    return returnSet;
+  }
+
+  /**
+   * Checks if a name starts with a matching prefix.
+   *
+   * @param prefix - prefix string.
+   * @param vInfo  - volume info.
+   * @return true or false.
+   */
+  private boolean isMatchingPrefix(String prefix, VolumeInfo vInfo) {
+    if (prefix == null || prefix.isEmpty()) {
+      return true;
+    }
+    return vInfo.getVolumeName().startsWith(prefix);
+  }
+
+  /**
+   * Checks if the key is after the prevKey.
+   *
+   * @param prevKey - String prevKey.
+   * @param vInfo   - volume Info.
+   * @return - true or false.
+   */
+  private boolean isAfterKey(String prevKey, VolumeInfo vInfo) {
+    if (prevKey == null || prevKey.isEmpty()) {
+      return true;
+    }
+    return prevKey.compareTo(vInfo.getVolumeName()) < 0;
+  }
+
+  /**
+   * Deletes a volume if it exists and is empty.
+   *
+   * @param args - volume args
+   * @throws OzoneException
+   */
+  public void deleteVolume(VolumeArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      byte[] volumeName =
+          metadataDB.get(args.getVolumeName().getBytes(encoding));
+      if (volumeName == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, args);
+      }
+
+      VolumeInfo vInfo = VolumeInfo.parse(new String(volumeName, encoding));
+
+      // Only remove volumes if they are empty.
+      if (vInfo.getBucketCount() > 0) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_EMPTY, args);
+      }
+
+      ListVolumes volumeList;
+      String user = vInfo.getOwner().getName();
+      byte[] userVolumes = userDB.get(user.getBytes(encoding));
+      if (userVolumes == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, args);
+      }
+
+      volumeList = ListVolumes.parse(new String(userVolumes, encoding));
+      volumeList.getVolumes().remove(vInfo);
+
+      metadataDB.delete(args.getVolumeName().getBytes(encoding));
+      userDB.put(user.getBytes(encoding),
+          volumeList.toDBString().getBytes(encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Create a bucket if it does not exist.
+   *
+   * @param args - BucketArgs
+   * @throws OzoneException
+   */
+  public void createBucket(BucketArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      // check if volume exists, buckets cannot be created without volumes
+      byte[] volumeName = metadataDB.get(args.getVolumeName()
+          .getBytes(encoding));
+      if (volumeName == null) {
+        throw ErrorTable.newError(ErrorTable.VOLUME_NOT_FOUND, args);
+      }
+
+      // A resource name is volume/bucket -- that is the key in the metadata table
+      byte[] bucketName = metadataDB.get(args.getResourceName()
+          .getBytes(encoding));
+      if (bucketName != null) {
+        throw ErrorTable.newError(ErrorTable.BUCKET_ALREADY_EXISTS, args);
+      }
+
+      BucketInfo bucketInfo =
+          new BucketInfo(args.getVolumeName(), args.getBucketName());
+
+      if (args.getRemoveAcls() != null) {
+        OzoneException ex = ErrorTable.newError(ErrorTable.MALFORMED_ACL, args);
+        ex.setMessage("Remove ACLs specified in bucket create. Please remove "
+            + "them and retry.");
+        throw ex;
+      }
+
+      VolumeInfo volInfo = VolumeInfo.parse(new String(volumeName, encoding));
+      volInfo.setBucketCount(volInfo.getBucketCount() + 1);
+
+      bucketInfo.setAcls(args.getAddAcls());
+      bucketInfo.setStorageType(args.getStorageType());
+      bucketInfo.setVersioning(args.getVersioning());
+      ListBuckets bucketList;
+
+      // get bucket list from user/volume -> bucketList
+      byte[] volumeBuckets = userDB.get(args.getParentName()
+          .getBytes(encoding));
+      if (volumeBuckets == null) {
+        bucketList = new ListBuckets();
+      } else {
+        bucketList = ListBuckets.parse(new String(volumeBuckets, encoding));
+      }
+
+      bucketList.addBucket(bucketInfo);
+      bucketList.sort();
+
+      // Update Volume->bucketCount
+      userDB.put(args.getVolumeName().getBytes(encoding),
+          volInfo.toDBString().getBytes(encoding));
+
+      // Now update the userDB with user/volume -> bucketList
+      userDB.put(args.getParentName().getBytes(encoding),
+          bucketList.toDBString().getBytes(encoding));
+
+      // Update userDB with volume/bucket -> empty key list
+      userDB.put(args.getResourceName().getBytes(encoding),
+          new ListKeys().toDBString().getBytes(encoding));
+
+      // and update the metadataDB with volume/bucket->BucketInfo
+      metadataDB.put(args.getResourceName().getBytes(encoding),
+          bucketInfo.toDBString().getBytes(encoding));
+
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Updates bucket properties such as ACLs, storage type, and versioning.
+   *
+   * @param args     - Bucket Args
+   * @param property - Flag which tells us what property to update
+   * @throws OzoneException
+   */
+  public void setBucketProperty(BucketArgs args, BucketProperty property)
+      throws OzoneException {
+
+    lock.writeLock().lock();
+    try {
+      // volume/bucket-> bucketInfo
+      byte[] bucketInfo = metadataDB.get(args.getResourceName().
+          getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      BucketInfo info = BucketInfo.parse(new String(bucketInfo, encoding));
+      byte[] volumeBuckets = userDB.get(args.getParentName()
+          .getBytes(encoding));
+      ListBuckets bucketList = ListBuckets.parse(new String(volumeBuckets,
+          encoding));
+      bucketList.getBuckets().remove(info);
+
+      switch (property) {
+      case ACLS:
+        processRemoveAcls(args, info);
+        processAddAcls(args, info);
+        break;
+      case STORAGETYPE:
+        info.setStorageType(args.getStorageType());
+        break;
+      case VERSIONING:
+        info.setVersioning(args.getVersioning());
+        break;
+      default:
+        OzoneException ozEx =
+            ErrorTable.newError(ErrorTable.BAD_PROPERTY, args);
+        ozEx.setMessage("Bucket property is not recognized.");
+        throw ozEx;
+      }
+
+      bucketList.addBucket(info);
+      metadataDB.put(args.getResourceName().getBytes(encoding),
+          info.toDBString().getBytes(encoding));
+
+      userDB.put(args.getParentName().getBytes(encoding),
+          bucketList.toDBString().getBytes(encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Processes the remove-ACL list and removes matching ACLs from the bucket.
+   *
+   * @param args - BucketArgs
+   * @param info - BucketInfo
+   */
+  private void processRemoveAcls(BucketArgs args, BucketInfo info) {
+    List<OzoneAcl> removeAcls = args.getRemoveAcls();
+    if ((removeAcls == null) || (info.getAcls() == null)) {
+      return;
+    }
+    for (OzoneAcl racl : args.getRemoveAcls()) {
+      ListIterator<OzoneAcl> aclIter = info.getAcls().listIterator();
+      while (aclIter.hasNext()) {
+        if (racl.equals(aclIter.next())) {
+          aclIter.remove();
+          break;
+        }
+      }
+    }
+  }
+
+  /**
+   * Processes the add-ACL list and adds each ACL to the bucket, skipping
+   * ACLs that are already present.
+   *
+   * @param args - BucketArgs
+   * @param info - BucketInfo
+   */
+  private void processAddAcls(BucketArgs args, BucketInfo info) {
+    List<OzoneAcl> addAcls = args.getAddAcls();
+    if ((addAcls == null)) {
+      return;
+    }
+
+    if (info.getAcls() == null) {
+      info.setAcls(addAcls);
+      return;
+    }
+
+    for (OzoneAcl newacl : addAcls) {
+      ListIterator<OzoneAcl> aclIter = info.getAcls().listIterator();
+      while (aclIter.hasNext()) {
+        if (newacl.equals(aclIter.next())) {
+          continue;
+        }
+      }
+      info.getAcls().add(newacl);
+    }
+  }
+
+  /**
+   * Deletes a given bucket.
+   *
+   * @param args - BucketArgs
+   * @throws OzoneException
+   */
+  public void deleteBucket(BucketArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      byte[] bucketInfo = metadataDB.get(args.getResourceName()
+          .getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      BucketInfo bInfo = BucketInfo.parse(new String(bucketInfo, encoding));
+
+      // Only remove buckets if they are empty.
+      if (bInfo.getKeyCount() > 0) {
+        throw ErrorTable.newError(ErrorTable.BUCKET_NOT_EMPTY, args);
+      }
+
+      byte[] bucketBytes = userDB.get(args.getParentName().getBytes(encoding));
+      if (bucketBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      ListBuckets bucketList =
+          ListBuckets.parse(new String(bucketBytes, encoding));
+      bucketList.getBuckets().remove(bInfo);
+
+      metadataDB.delete(args.getResourceName().getBytes(encoding));
+      userDB.put(args.getParentName().getBytes(encoding),
+          bucketList.toDBString().getBytes(encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Returns the Bucket info for a given bucket.
+   *
+   * @param args - Bucket Args
+   * @return BucketInfo   -  Bucket Information
+   * @throws OzoneException
+   */
+  public BucketInfo getBucketInfo(BucketArgs args) throws OzoneException {
+    lock.readLock().lock();
+    try {
+      byte[] bucketBytes = metadataDB.get(args.getResourceName()
+          .getBytes(encoding));
+      if (bucketBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      return BucketInfo.parse(new String(bucketBytes, encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Returns a list of buckets for a given volume.
+   *
+   * @param args - volume args
+   * @return List of buckets
+   * @throws OzoneException
+   */
+  public ListBuckets listBuckets(ListArgs args) throws OzoneException {
+    lock.readLock().lock();
+    try {
+      Preconditions.checkState(args.getArgs() instanceof VolumeArgs);
+      VolumeArgs vArgs = (VolumeArgs) args.getArgs();
+      String userVolKey = vArgs.getUserName() + "/" + vArgs.getVolumeName();
+
+      // TODO : Query using Prefix and PrevKey
+      byte[] bucketBytes = userDB.get(userVolKey.getBytes(encoding));
+      if (bucketBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_VOLUME_NAME,
+            args.getArgs());
+      }
+      return ListBuckets.parse(new String(bucketBytes, encoding));
+    } catch (IOException ex) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args.getArgs(), ex);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Creates a key and returns a stream to which the key data can be written.
+   *
+   * @param args KeyArgs
+   * @return - A stream to which the key data can be written.
+   * @throws OzoneException
+   */
+  public OutputStream createKey(KeyArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      String fileNameHash = DigestUtils.sha256Hex(args.getResourceName());
+
+      // Please don't try a trillion objects unless the physical file system
+      // is capable of holding that many files in a single directory.
+
+      String fullPath = storageRoot + OBJECT_DIR + fileNameHash;
+      File f = new File(fullPath);
+
+      // In real Ozone it would not be this way; a file would be overwritten
+      // only if the upload is successful.
+      if (f.exists()) {
+        LOG.debug("We are overwriting a file. This is by design.");
+        if (!f.delete()) {
+          LOG.error("Unable to delete the file: {}", fullPath);
+          throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args);
+        }
+      }
+
+      // f.createNewFile();
+      FileOutputStream fsStream = new FileOutputStream(f);
+      inProgressObjects.put(fsStream, fullPath);
+
+      return fsStream;
+    } catch (IOException e) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, e);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
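+
+  // Typical write sequence against this manager (illustrative only: "args"
+  // and "data" are placeholders, and closing/error handling is omitted):
+  //
+  //   OutputStream out = oz.createKey(args);  // opens the backing object file
+  //   out.write(data);
+  //   oz.commitKey(args, out);                // records the KeyInfo in the DBs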
+
+  /**
+   * commitKey moves an in-progress object into the metadata store so that the
+   * key is visible in metadata operations from that point onwards.
+   *
+   * @param args Object args
+   * @throws OzoneException
+   */
+  public void commitKey(KeyArgs args, OutputStream stream)
+      throws OzoneException {
+    SimpleDateFormat format =
+        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss ZZZ", Locale.US);
+    lock.writeLock().lock();
+
+    try {
+      byte[] bucketInfo = metadataDB.get(args.getParentName()
+          .getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_RESOURCE_NAME, args);
+      }
+      BucketInfo bInfo = BucketInfo.parse(new String(bucketInfo, encoding));
+      bInfo.setKeyCount(bInfo.getKeyCount() + 1);
+
+      String fileNameHash = inProgressObjects.get(stream);
+      inProgressObjects.remove(stream);
+      if (fileNameHash == null) {
+        throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args);
+      }
+
+      ListKeys keyList;
+      byte[] bucketListBytes = userDB.get(args.getParentName()
+          .getBytes(encoding));
+      keyList = ListKeys.parse(new String(bucketListBytes, encoding));
+      KeyInfo keyInfo;
+
+      byte[] objectBytes = metadataDB.get(args.getResourceName()
+          .getBytes(encoding));
+
+      if (objectBytes != null) {
+        // we are overwriting an existing object.
+        // TODO : Emit info for Accounting
+        keyInfo = KeyInfo.parse(new String(objectBytes, encoding));
+        keyList.getKeyList().remove(keyInfo);
+      } else {
+        keyInfo = new KeyInfo();
+      }
+
+      keyInfo.setCreatedOn(format.format(new Date(System.currentTimeMillis())));
+
+      // TODO : support version, we need to check if versioning
+      // is switched on the bucket and make appropriate calls.
+      keyInfo.setVersion(0);
+
+      keyInfo.setDataFileName(fileNameHash);
+      keyInfo.setKeyName(args.getKeyName());
+      keyInfo.setMd5hash(args.getHash());
+      keyInfo.setSize(args.getSize());
+
+      keyList.getKeyList().add(keyInfo);
+
+      // If the key exists, we overwrite happily :), since the
+      // earlier call - createKey - has already overwritten the data.
+
+      metadataDB.put(args.getResourceName().getBytes(encoding),
+          keyInfo.toDBString().getBytes(encoding));
+
+      metadataDB.put(args.getParentName().getBytes(encoding),
+          bInfo.toDBString().getBytes(encoding));
+
+      userDB.put(args.getParentName().getBytes(encoding),
+          keyList.toDBString().getBytes(encoding));
+
+    } catch (IOException e) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, e);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Deletes a key from a given bucket.
+   *
+   * @param args - ObjectArgs
+   * @throws OzoneException
+   */
+  public void deleteKey(KeyArgs args) throws OzoneException {
+    lock.writeLock().lock();
+    try {
+      byte[] bucketInfo = metadataDB.get(args.getParentName()
+          .getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+      BucketInfo bInfo = BucketInfo.parse(new String(bucketInfo, encoding));
+      bInfo.setKeyCount(bInfo.getKeyCount() - 1);
+
+
+      byte[] bucketListBytes = userDB.get(args.getParentName()
+          .getBytes(encoding));
+      if (bucketListBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+      ListKeys keyList = ListKeys.parse(new String(bucketListBytes, encoding));
+
+
+      byte[] objectBytes = metadataDB.get(args.getResourceName()
+          .getBytes(encoding));
+      if (objectBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_KEY, args);
+      }
+
+      KeyInfo oInfo = KeyInfo.parse(new String(objectBytes, encoding));
+      keyList.getKeyList().remove(oInfo);
+
+      String fileNameHash = DigestUtils.sha256Hex(args.getResourceName());
+
+      String fullPath = storageRoot + OBJECT_DIR + fileNameHash;
+      File f = new File(fullPath);
+
+      if (f.exists()) {
+        if (!f.delete()) {
+          throw ErrorTable.newError(ErrorTable.KEY_OPERATION_CONFLICT, args);
+        }
+      } else {
+        throw ErrorTable.newError(ErrorTable.INVALID_KEY, args);
+      }
+
+
+      metadataDB.delete(args.getResourceName().getBytes(encoding));
+      metadataDB.put(args.getParentName().getBytes(encoding),
+          bInfo.toDBString().getBytes(encoding));
+      userDB.put(args.getParentName().getBytes(encoding),
+          keyList.toDBString().getBytes(encoding));
+    } catch (IOException e) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, e);
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Returns a Stream for the file.
+   *
+   * @param args - Object args
+   * @return Stream
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public LengthInputStream newKeyReader(KeyArgs args)
+      throws IOException, OzoneException {
+    lock.readLock().lock();
+    try {
+      String fileNameHash = DigestUtils.sha256Hex(args.getResourceName());
+      String fullPath = storageRoot + OBJECT_DIR + fileNameHash;
+      File f = new File(fullPath);
+      if (!f.exists()) {
+        throw ErrorTable.newError(ErrorTable.INVALID_RESOURCE_NAME, args);
+      }
+      long size = f.length();
+
+      FileInputStream fileStream = new FileInputStream(f);
+      return new LengthInputStream(fileStream, size);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Returns keys in a bucket.
+   *
+   * @param args - ListArgs
+   * @return List of keys.
+   * @throws IOException
+   * @throws OzoneException
+   */
+  public ListKeys listKeys(ListArgs args) throws IOException, OzoneException {
+    lock.readLock().lock();
+    // TODO : Support Prefix and PrevKey lookup.
+    try {
+      Preconditions.checkState(args.getArgs() instanceof BucketArgs);
+      BucketArgs bArgs = (BucketArgs) args.getArgs();
+      byte[] bucketInfo = metadataDB.get(bArgs.getResourceName()
+          .getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_RESOURCE_NAME, bArgs);
+      }
+
+      byte[] bucketListBytes = userDB.get(bArgs.getResourceName()
+          .getBytes(encoding));
+      if (bucketListBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_RESOURCE_NAME, bArgs);
+      }
+      return ListKeys.parse(new String(bucketListBytes, encoding));
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Get the Key information for a given key.
+   *
+   * @param args - Key Args
+   * @return KeyInfo - Key Information
+   * @throws OzoneException
+   */
+  public KeyInfo getKeyInfo(KeyArgs args) throws IOException, OzoneException {
+    lock.readLock().lock();
+    try {
+      byte[] bucketInfo = metadataDB
+          .get(args.getParentName().getBytes(encoding));
+      if (bucketInfo == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      byte[] bucketListBytes = userDB
+          .get(args.getParentName().getBytes(encoding));
+      if (bucketListBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, args);
+      }
+
+      byte[] objectBytes = metadataDB
+          .get(args.getResourceName().getBytes(encoding));
+      if (objectBytes == null) {
+        throw ErrorTable.newError(ErrorTable.INVALID_KEY, args);
+      }
+
+      return KeyInfo.parse(new String(objectBytes, encoding));
+    } catch (IOException e) {
+      throw ErrorTable.newError(ErrorTable.SERVER_ERROR, args, e);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * This is used in updates to volume metadata.
+   */
+  public enum VolumeProperty {
+    OWNER, QUOTA
+  }
+
+  /**
+   * Bucket Properties.
+   */
+  public enum BucketProperty {
+    ACLS, STORAGETYPE, VERSIONING
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/package-info.java
new file mode 100644
index 0000000..6bf6643
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/localstorage/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.localstorage;
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/LengthInputStreamMessageBodyWriter.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/LengthInputStreamMessageBodyWriter.java
new file mode 100644
index 0000000..6db49d5
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/LengthInputStreamMessageBodyWriter.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.messages;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+
+/**
+ * Writes outbound HTTP response object bytes.  The content length is determined
+ * from the {@link LengthInputStream}.
+ */
+public final class LengthInputStreamMessageBodyWriter
+    implements MessageBodyWriter<LengthInputStream> {
+  private static final int CHUNK_SIZE = 8192;
+
+  @Override
+  public long getSize(LengthInputStream lis, Class<?> type, Type genericType,
+                      Annotation[] annotations, MediaType mediaType) {
+    return lis.getLength();
+  }
+
+  @Override
+  public boolean isWriteable(Class<?> type, Type genericType,
+                             Annotation[] annotations, MediaType mediaType) {
+    return LengthInputStream.class.isAssignableFrom(type);
+  }
+
+  @Override
+  public void writeTo(LengthInputStream lis, Class<?> type, Type genericType,
+                      Annotation[] annotations, MediaType mediaType,
+                      MultivaluedMap<String, Object> httpHeaders,
+                      OutputStream out) throws IOException {
+    IOUtils.copyBytes(lis, out, CHUNK_SIZE);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/StringMessageBodyWriter.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/StringMessageBodyWriter.java
new file mode 100644
index 0000000..ad637af
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/StringMessageBodyWriter.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.messages;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+
+/**
+ * Writes outbound HTTP response strings.  We use this rather than the built-in
+ * writer so that we can determine content length from the string length instead
+ * of possibly falling back to a chunked response.
+ */
+public final class StringMessageBodyWriter implements
+    MessageBodyWriter<String> {
+  private static final int CHUNK_SIZE = 8192;
+
+  @Override
+  public long getSize(String str, Class<?> type, Type genericType,
+                      Annotation[] annotations, MediaType mediaType) {
+    return str.length();
+  }
+
+  @Override
+  public boolean isWriteable(Class<?> type, Type genericType,
+                             Annotation[] annotations, MediaType mediaType) {
+    return String.class.isAssignableFrom(type);
+  }
+
+  @Override
+  public void writeTo(String str, Class<?> type, Type genericType,
+                      Annotation[] annotations, MediaType mediaType,
+                      MultivaluedMap<String, Object> httpHeaders,
+                      OutputStream out) throws IOException {
+    IOUtils.copyBytes(new ByteArrayInputStream(
+        str.getBytes(OzoneUtils.ENCODING)), out, CHUNK_SIZE);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/package-info.java
new file mode 100644
index 0000000..273b3f5
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/messages/package-info.java
@@ -0,0 +1,18 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.messages;
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/CloseableCleanupListener.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/CloseableCleanupListener.java
new file mode 100644
index 0000000..3d9db20
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/CloseableCleanupListener.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelFutureListener;
+import org.apache.hadoop.io.IOUtils;
+
+import java.io.Closeable;
+
+/**
+ * A {@link ChannelFutureListener} that closes {@link Closeable} resources.
+ */
+final class CloseableCleanupListener implements ChannelFutureListener {
+
+  private final Closeable[] closeables;
+
+  /**
+   * Creates a new CloseableCleanupListener.
+   *
+   * @param closeables any number of closeable resources
+   */
+  CloseableCleanupListener(Closeable... closeables) {
+    this.closeables = closeables;
+  }
+
+  @Override
+  public void operationComplete(ChannelFuture future) {
+    IOUtils.cleanupWithLogger(null, closeables);
+  }
+}
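A minimal usage sketch (illustrative; ctx and respIn are assumed to be a ChannelHandlerContext and the response InputStream): the listener is attached to a Netty write future so that the stream is closed once the write completes, mirroring how the request-content handler later in this patch cleans up the response pipe.

    ChannelFuture respFuture = ctx.writeAndFlush(new ChunkedStream(respIn));
    respFuture.addListener(new CloseableCleanupListener(respIn));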
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreChannelHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreChannelHandler.java
new file mode 100644
index 0000000..89c196c
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreChannelHandler.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import io.netty.channel.ChannelFutureListener;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.DefaultFullHttpResponse;
+import io.netty.handler.codec.http.HttpResponse;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
+import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
+import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
+
+/**
+ * Abstract base class for the multiple Netty channel handlers used in the
+ * Object Store Netty channel pipeline.
+ */
+abstract class ObjectStoreChannelHandler<T>
+    extends SimpleChannelInboundHandler<T> {
+
+  /** Log usable in all subclasses. */
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(ObjectStoreChannelHandler.class);
+
+  /**
+   * Handles uncaught exceptions in the channel pipeline by sending an internal
+   * server error response if the channel is still active.
+   *
+   * @param ctx ChannelHandlerContext to receive response
+   * @param cause Throwable that was unhandled in the channel pipeline
+   */
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+    LOG.error("Unexpected exception in Netty pipeline.", cause);
+    if (ctx.channel().isActive()) {
+      sendErrorResponse(ctx, INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  /**
+   * Sends an error response.  This method is used when an unexpected error is
+   * encountered within the channel pipeline, outside of the actual Object Store
+   * application.  It always closes the connection, because we can't in general
+   * know the state of the connection when these errors occur, so attempting to
+   * keep the connection alive could be unpredictable.
+   *
+   * @param ctx ChannelHandlerContext to receive response
+   * @param status HTTP response status
+   */
+  protected static void sendErrorResponse(ChannelHandlerContext ctx,
+      HttpResponseStatus status) {
+    HttpResponse nettyResp = new DefaultFullHttpResponse(HTTP_1_1, status);
+    nettyResp.headers().set(CONTENT_LENGTH, 0);
+    nettyResp.headers().set(CONNECTION, CLOSE);
+    ctx.writeAndFlush(nettyResp).addListener(ChannelFutureListener.CLOSE);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainer.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainer.java
new file mode 100644
index 0000000..c7b516f
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainer.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
+import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaders.Names.TRANSFER_ENCODING;
+import static io.netty.handler.codec.http.HttpHeaders.Names.HOST;
+import static io.netty.handler.codec.http.HttpHeaders.Values.KEEP_ALIVE;
+import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.TimeUnit;
+
+import com.sun.jersey.core.header.InBoundHeaders;
+import com.sun.jersey.spi.container.ContainerRequest;
+import com.sun.jersey.spi.container.ContainerResponse;
+import com.sun.jersey.spi.container.ContainerResponseWriter;
+import com.sun.jersey.spi.container.WebApplication;
+
+import io.netty.handler.codec.http.DefaultHttpResponse;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponse;
+import io.netty.handler.codec.http.HttpResponseStatus;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.handlers.StorageHandlerBuilder;
+
+/**
+ * This is a custom Jersey container that hosts the Object Store web
+ * application. It supports dispatching an inbound Netty {@link HttpRequest}
+ * to the Object Store Jersey application.  Request dispatching must run
+ * asynchronously, because the Jersey application must consume the inbound
+ * HTTP request from a piped stream and produce the outbound HTTP response
+ * for another piped stream.  The Netty channel handlers consume the connected
+ * ends of these piped streams. Request dispatching cannot run directly on
+ * the Netty threads, or there would be a risk of deadlock (one thread
+ * producing/consuming its end of the pipe while no other thread is
+ * producing/consuming the opposite end).
+ */
+public final class ObjectStoreJerseyContainer {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ObjectStoreJerseyContainer.class);
+
+  private final WebApplication webapp;
+
+  private StorageHandler storageHandler;
+
+  /**
+   * Creates a new ObjectStoreJerseyContainer.
+   *
+   * @param webapp web application
+   */
+  public ObjectStoreJerseyContainer(WebApplication webapp) {
+    this.webapp = webapp;
+  }
+
+  /**
+   * Sets the {@link StorageHandler}. This must be called before dispatching any
+   * requests.
+   *
+   * @param newStorageHandler {@link StorageHandler} implementation
+   */
+  public void setStorageHandler(StorageHandler newStorageHandler) {
+    this.storageHandler = newStorageHandler;
+  }
+
+  /**
+   * Asynchronously executes an HTTP request.
+   *
+   * @param nettyReq HTTP request
+   * @param reqIn input stream for reading request body
+   * @param respOut output stream for writing response body
+   */
+  public Future<HttpResponse> dispatch(HttpRequest nettyReq, InputStream reqIn,
+                                       OutputStream respOut) {
+    // The request executes on a separate background thread.  As soon as enough
+    // processing has completed to bootstrap the outbound response, the thread
+    // counts down on a latch.  This latch also unblocks callers trying to get
+    // the asynchronous response out of the returned future.
+    final CountDownLatch latch = new CountDownLatch(1);
+    final RequestRunner runner = new RequestRunner(nettyReq, reqIn, respOut,
+        latch);
+    final Thread thread = new Thread(runner);
+    thread.setDaemon(true);
+    thread.start();
+    return new Future<HttpResponse>() {
+
+      private volatile boolean isCancelled = false;
+
+      @Override
+      public boolean cancel(boolean mayInterruptIfRunning) {
+        if (latch.getCount() == 0) {
+          return false;
+        }
+        if (!mayInterruptIfRunning) {
+          return false;
+        }
+        if (!thread.isAlive()) {
+          return false;
+        }
+        thread.interrupt();
+        try {
+          thread.join();
+        } catch (InterruptedException e) {
+          LOG.info("Interrupted while attempting to cancel dispatch thread.");
+          Thread.currentThread().interrupt();
+          return false;
+        }
+        isCancelled = true;
+        return true;
+      }
+
+      @Override
+      public HttpResponse get()
+          throws InterruptedException, ExecutionException {
+        checkCancelled();
+        latch.await();
+        return this.getOrThrow();
+      }
+
+      @Override
+      public HttpResponse get(long timeout, TimeUnit unit)
+          throws InterruptedException, ExecutionException, TimeoutException {
+        checkCancelled();
+        if (!latch.await(timeout, unit)) {
+          throw new TimeoutException(String.format(
+              "Timed out waiting for HttpResponse after %d %s.",
+              timeout, unit.toString().toLowerCase()));
+        }
+        return this.getOrThrow();
+      }
+
+      @Override
+      public boolean isCancelled() {
+        return isCancelled;
+      }
+
+      @Override
+      public boolean isDone() {
+        return !isCancelled && latch.getCount() == 0;
+      }
+
+      private void checkCancelled() {
+        if (isCancelled()) {
+          throw new CancellationException();
+        }
+      }
+
+      private HttpResponse getOrThrow() throws ExecutionException {
+        try {
+          return runner.getResponse();
+        } catch (Exception e) {
+          throw new ExecutionException(e);
+        }
+      }
+    };
+  }
+
+  /**
+   * Runs the actual handling of the HTTP request.
+   */
+  private final class RequestRunner implements Runnable,
+      ContainerResponseWriter {
+
+    private final CountDownLatch latch;
+    private final HttpRequest nettyReq;
+    private final InputStream reqIn;
+    private final OutputStream respOut;
+
+    private Exception exception;
+    private HttpResponse nettyResp;
+
+    /**
+     * Creates a new RequestRunner.
+     *
+     * @param nettyReq HTTP request
+     * @param reqIn input stream for reading request body
+     * @param respOut output stream for writing response body
+     * @param latch for coordinating asynchronous return of HTTP response
+     */
+    RequestRunner(HttpRequest nettyReq, InputStream reqIn,
+                         OutputStream respOut, CountDownLatch latch) {
+      this.latch = latch;
+      this.nettyReq = nettyReq;
+      this.reqIn = reqIn;
+      this.respOut = respOut;
+    }
+
+    @Override
+    public void run() {
+      LOG.trace("begin RequestRunner, nettyReq = {}", this.nettyReq);
+      StorageHandlerBuilder.setStorageHandler(
+          ObjectStoreJerseyContainer.this.storageHandler);
+      try {
+        ContainerRequest jerseyReq = nettyRequestToJerseyRequest(
+            ObjectStoreJerseyContainer.this.webapp, this.nettyReq, this.reqIn);
+        ObjectStoreJerseyContainer.this.webapp.handleRequest(jerseyReq, this);
+      } catch (Exception e) {
+        LOG.error("Error running Jersey Request Runner", e);
+        this.exception = e;
+        this.latch.countDown();
+      } finally {
+        IOUtils.cleanupWithLogger(null, this.reqIn, this.respOut);
+        StorageHandlerBuilder.removeStorageHandler();
+      }
+      LOG.trace("end RequestRunner, nettyReq = {}", this.nettyReq);
+    }
+
+    /**
+     * This is a callback triggered by Jersey as soon as dispatch has completed
+     * to the point of knowing what kind of response to return.  We save the
+     * response and trigger the latch to unblock callers waiting on the
+     * asynchronous return of the response.  Our response always sets a
+     * Content-Length header.  (We do not support Transfer-Encoding: chunked.)
+     * We also return the output stream for Jersey to use for writing the
+     * response body.
+     *
+     * @param contentLength length of response
+     * @param jerseyResp HTTP response returned by Jersey
+     * @return OutputStream for Jersey to use for writing the response body
+     */
+    @Override
+    public OutputStream writeStatusAndHeaders(long contentLength,
+                                              ContainerResponse jerseyResp) {
+      LOG.trace(
+          "begin writeStatusAndHeaders, contentLength = {}, jerseyResp = {}.",
+          contentLength, jerseyResp);
+      this.nettyResp = jerseyResponseToNettyResponse(jerseyResp);
+      this.nettyResp.headers().set(CONTENT_LENGTH, Math.max(0, contentLength));
+      this.nettyResp.headers().set(CONNECTION,
+          HttpHeaders.isKeepAlive(this.nettyReq) ? KEEP_ALIVE : CLOSE);
+      this.latch.countDown();
+      LOG.trace(
+          "end writeStatusAndHeaders, contentLength = {}, jerseyResp = {}.",
+          contentLength, jerseyResp);
+      return this.respOut;
+    }
+
+    /**
+     * This is a callback triggered by Jersey after it has completed writing the
+     * response body to the stream.  We must close the stream here to unblock
+     * the Netty thread consuming the last chunk of the response from the input
+     * end of the piped stream.
+     *
+     * @throws IOException if there is an I/O error
+     */
+    @Override
+    public void finish() throws IOException {
+      IOUtils.cleanupWithLogger(null, this.respOut);
+    }
+
+    /**
+     * Gets the HTTP response calculated by the Jersey application, or throws an
+     * exception if an error occurred during processing.  It only makes sense to
+     * call this method after waiting on the latch to trigger.
+     *
+     * @return HTTP response
+     * @throws Exception if there was an error executing the request
+     */
+    public HttpResponse getResponse() throws Exception {
+      if (this.exception != null) {
+        throw this.exception;
+      }
+      return this.nettyResp;
+    }
+  }
+
+  /**
+   * Converts a Jersey HTTP response object to a Netty HTTP response object.
+   *
+   * @param jerseyResp Jersey HTTP response
+   * @return Netty HTTP response
+   */
+  private static HttpResponse jerseyResponseToNettyResponse(
+      ContainerResponse jerseyResp) {
+    HttpResponse nettyResp = new DefaultHttpResponse(HTTP_1_1,
+        HttpResponseStatus.valueOf(jerseyResp.getStatus()));
+    for (Map.Entry<String, List<Object>> header :
+        jerseyResp.getHttpHeaders().entrySet()) {
+      if (!header.getKey().equalsIgnoreCase(CONTENT_LENGTH.toString()) &&
+          !header.getKey().equalsIgnoreCase(TRANSFER_ENCODING.toString())) {
+        nettyResp.headers().set(header.getKey(), header.getValue());
+      }
+    }
+    return nettyResp;
+  }
+
+  /**
+   * Converts a Netty HTTP request object to a Jersey HTTP request object.
+   *
+   * @param webapp web application
+   * @param nettyReq Netty HTTP request
+   * @param reqIn input stream for reading request body
+   * @return Jersey HTTP request
+   * @throws URISyntaxException if there is an error handling the request URI
+   */
+  private static ContainerRequest nettyRequestToJerseyRequest(
+      WebApplication webapp, HttpRequest nettyReq, InputStream reqIn)
+      throws URISyntaxException {
+    HttpHeaders nettyHeaders = nettyReq.headers();
+    InBoundHeaders jerseyHeaders = new InBoundHeaders();
+    for (String name : nettyHeaders.names()) {
+      jerseyHeaders.put(name, nettyHeaders.getAll(name));
+    }
+    String host = nettyHeaders.get(HOST);
+    String scheme = host.startsWith("https") ? "https://" : "http://";
+    String baseUri = scheme + host + "/";
+    String reqUri = scheme + host + nettyReq.getUri();
+    LOG.trace("baseUri = {}, reqUri = {}", baseUri, reqUri);
+    return new ContainerRequest(webapp, nettyReq.getMethod().name(),
+        new URI(baseUri), new URI(reqUri), jerseyHeaders, reqIn);
+  }
+}
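A sketch of the piped-stream wiring described in the class comment (illustrative only; container and nettyReq are assumed to be in scope). The dispatch handler later in this patch performs the same wiring: the Netty side fills reqOut and drains respIn, while the Jersey thread drains reqIn and fills respOut, so neither side blocks waiting on work it would have to do itself.

    PipedInputStream reqIn = new PipedInputStream();
    PipedOutputStream reqOut = new PipedOutputStream(reqIn);    // Netty thread writes the request body here
    PipedInputStream respIn = new PipedInputStream();
    PipedOutputStream respOut = new PipedOutputStream(respIn);  // Jersey thread writes the response body here
    Future<HttpResponse> nettyResp = container.dispatch(nettyReq, reqIn, respOut);
    // Once nettyResp yields the response headers, the Netty handler streams respIn back to the client.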
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainerProvider.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainerProvider.java
new file mode 100644
index 0000000..e943969
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreJerseyContainerProvider.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import com.sun.jersey.api.container.ContainerException;
+import com.sun.jersey.api.core.ResourceConfig;
+import com.sun.jersey.spi.container.ContainerProvider;
+import com.sun.jersey.spi.container.WebApplication;
+
+/**
+ * This is a Jersey {@link ContainerProvider} capable of bootstrapping the
+ * Object Store web application into a custom container.  It must be registered
+ * using the Java service loader mechanism by listing it in
+ * META-INF/services/com.sun.jersey.spi.container.ContainerProvider.
+ */
+public final class ObjectStoreJerseyContainerProvider
+    implements ContainerProvider<ObjectStoreJerseyContainer> {
+
+  @Override
+  public ObjectStoreJerseyContainer createContainer(
+      Class<ObjectStoreJerseyContainer> type, ResourceConfig conf,
+      WebApplication webapp) throws ContainerException {
+    return new ObjectStoreJerseyContainer(webapp);
+  }
+}
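For reference, the service-loader registration mentioned in the class comment is a one-line descriptor naming this provider; a file of roughly this shape is expected on the classpath (shown here only as an illustration):

    # META-INF/services/com.sun.jersey.spi.container.ContainerProvider
    org.apache.hadoop.ozone.web.netty.ObjectStoreJerseyContainerProvider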
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreRestHttpServer.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreRestHttpServer.java
new file mode 100644
index 0000000..8ab9325
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreRestHttpServer.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.BindException;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.net.SocketException;
+import java.nio.channels.ServerSocketChannel;
+import java.util.Enumeration;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
+
+import io.netty.bootstrap.ChannelFactory;
+import io.netty.bootstrap.ServerBootstrap;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.EventLoopGroup;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.channel.socket.SocketChannel;
+import io.netty.channel.socket.nio.NioServerSocketChannel;
+import io.netty.handler.codec.http.HttpRequestDecoder;
+import io.netty.handler.codec.http.HttpResponseEncoder;
+import io.netty.handler.stream.ChunkedWriteHandler;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .HDDS_REST_HTTP_ADDRESS_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .HDDS_REST_HTTP_ADDRESS_KEY;
+
+/**
+ * Netty-based web server for the HDDS REST API.
+ * <p>
+ * Based on the Datanode HTTP server.
+ */
+public class ObjectStoreRestHttpServer implements Closeable {
+  private final EventLoopGroup bossGroup;
+  private final EventLoopGroup workerGroup;
+  private final ServerSocketChannel externalHttpChannel;
+  private final ServerBootstrap httpServer;
+  private final Configuration conf;
+  private final Configuration confForCreate;
+  private InetSocketAddress httpAddress;
+  static final Log LOG = LogFactory.getLog(ObjectStoreRestHttpServer.class);
+  private final ObjectStoreHandler objectStoreHandler;
+
+  public ObjectStoreRestHttpServer(final Configuration conf,
+      final ServerSocketChannel externalHttpChannel,
+      ObjectStoreHandler objectStoreHandler) throws IOException {
+    this.conf = conf;
+
+    this.confForCreate = new Configuration(conf);
+    this.objectStoreHandler = objectStoreHandler;
+    confForCreate.set(FsPermission.UMASK_LABEL, "000");
+
+    this.bossGroup = new NioEventLoopGroup();
+    this.workerGroup = new NioEventLoopGroup();
+    this.externalHttpChannel = externalHttpChannel;
+
+    this.httpServer = new ServerBootstrap();
+    this.httpServer.group(bossGroup, workerGroup);
+    this.httpServer.childHandler(new ChannelInitializer<SocketChannel>() {
+      @Override
+      protected void initChannel(SocketChannel ch) throws Exception {
+        ChannelPipeline p = ch.pipeline();
+        p.addLast(new HttpRequestDecoder(), new HttpResponseEncoder());
+        // TODO: add a cross-site request forgery (CSRF) prevention filter.
+        p.addLast(new ChunkedWriteHandler(), new ObjectStoreURLDispatcher(
+            objectStoreHandler.getObjectStoreJerseyContainer()));
+      }
+    });
+
+    this.httpServer.childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK,
+        conf.getInt(ScmConfigKeys.HDDS_REST_NETTY_HIGH_WATERMARK,
+            ScmConfigKeys.HDDS_REST_NETTY_HIGH_WATERMARK_DEFAULT));
+    this.httpServer.childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK,
+        conf.getInt(ScmConfigKeys.HDDS_REST_NETTY_LOW_WATERMARK,
+            ScmConfigKeys.HDDS_REST_NETTY_LOW_WATERMARK_DEFAULT));
+
+    if (externalHttpChannel == null) {
+      httpServer.channel(NioServerSocketChannel.class);
+    } else {
+      httpServer.channelFactory(
+          (ChannelFactory<NioServerSocketChannel>) () -> new
+              NioServerSocketChannel(
+              externalHttpChannel) {
+            // The channel has been bound externally via JSVC,
+            // thus bind() becomes a no-op.
+            @Override
+            protected void doBind(SocketAddress localAddress) throws Exception {
+            }
+          });
+    }
+
+  }
+
+  public InetSocketAddress getHttpAddress() {
+    return httpAddress;
+  }
+
+  public void start() throws IOException {
+    if (httpServer != null) {
+
+      InetSocketAddress infoAddr = NetUtils.createSocketAddr(
+          conf.getTrimmed(HDDS_REST_HTTP_ADDRESS_KEY,
+              HDDS_REST_HTTP_ADDRESS_DEFAULT));
+
+      ChannelFuture f = httpServer.bind(infoAddr);
+      try {
+        f.syncUninterruptibly();
+      } catch (Throwable e) {
+        if (e instanceof BindException) {
+          throw NetUtils.wrapException(null, 0, infoAddr.getHostName(),
+              infoAddr.getPort(), (SocketException) e);
+        } else {
+          throw e;
+        }
+      }
+      httpAddress = (InetSocketAddress) f.channel().localAddress();
+      LOG.info("Listening HDDS REST traffic on " + httpAddress);
+    }
+
+  }
+
+  @Override
+  public void close() throws IOException {
+    bossGroup.shutdownGracefully();
+    workerGroup.shutdownGracefully();
+    if (externalHttpChannel != null) {
+      externalHttpChannel.close();
+    }
+  }
+
+  /**
+   * A minimal {@link FilterConfig} implementation backed by a {@link Map}.
+   */
+  private static final class MapBasedFilterConfig implements FilterConfig {
+
+    private final String filterName;
+    private final Map<String, String> parameters;
+
+    /**
+     * Creates a new MapBasedFilterConfig.
+     *
+     * @param filterName filter name
+     * @param parameters mapping of filter initialization parameters
+     */
+    MapBasedFilterConfig(String filterName,
+        Map<String, String> parameters) {
+      this.filterName = filterName;
+      this.parameters = parameters;
+    }
+
+    @Override
+    public String getFilterName() {
+      return this.filterName;
+    }
+
+    @Override
+    public String getInitParameter(String name) {
+      return this.parameters.get(name);
+    }
+
+    @Override
+    public Enumeration<String> getInitParameterNames() {
+      throw this.notImplemented();
+    }
+
+    @Override
+    public ServletContext getServletContext() {
+      throw this.notImplemented();
+    }
+
+    /**
+     * Creates an exception indicating that an interface method is not
+     * implemented.  Such exceptions should never be seen in practice, because
+     * this helper is only used for methods that are not called by
+     * {@link RestCsrfPreventionFilter}.
+     *
+     * @return exception indicating method not implemented
+     */
+    private UnsupportedOperationException notImplemented() {
+      return new UnsupportedOperationException(
+          this.getClass().getSimpleName() + " does not implement this method.");
+    }
+  }
+}
+
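A rough lifecycle sketch for this server (illustrative only; objectStoreHandler is assumed to be an already-constructed ObjectStoreHandler, the bind address is an example value, and exception handling is omitted):

    Configuration conf = new Configuration();
    conf.set(ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_KEY, "0.0.0.0:9880");  // example address
    try (ObjectStoreRestHttpServer server =
             new ObjectStoreRestHttpServer(conf, null, objectStoreHandler)) {
      server.start();                                    // binds and begins serving REST traffic
      InetSocketAddress addr = server.getHttpAddress();  // actual bound address
      // ... handle requests ...
    }                                                    // close() shuts down the event loop groups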
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreURLDispatcher.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreURLDispatcher.java
new file mode 100644
index 0000000..9020c14
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/ObjectStoreURLDispatcher.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelPipeline;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.HttpRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * URL dispatcher that bridges an inbound Netty request to the Object Store
+ * Jersey container.
+ */
+@InterfaceAudience.Private
+public class ObjectStoreURLDispatcher
+    extends SimpleChannelInboundHandler<HttpRequest> {
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(ObjectStoreURLDispatcher.class);
+  private final ObjectStoreJerseyContainer objectStoreJerseyContainer;
+
+  ObjectStoreURLDispatcher(
+      ObjectStoreJerseyContainer objectStoreJerseyContainer)
+      throws IOException {
+
+    this.objectStoreJerseyContainer = objectStoreJerseyContainer;
+  }
+
+  @Override
+  protected void channelRead0(ChannelHandlerContext ctx, HttpRequest req)
+      throws Exception {
+    ChannelPipeline p = ctx.pipeline();
+    RequestDispatchObjectStoreChannelHandler h =
+        new RequestDispatchObjectStoreChannelHandler(
+            this.objectStoreJerseyContainer);
+    p.replace(this,
+        RequestDispatchObjectStoreChannelHandler.class.getSimpleName(), h);
+    h.channelRead0(ctx, req);
+
+  }
+
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestContentObjectStoreChannelHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestContentObjectStoreChannelHandler.java
new file mode 100644
index 0000000..0a2f22d
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestContentObjectStoreChannelHandler.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelFutureListener;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.http.HttpContent;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponse;
+import io.netty.handler.codec.http.LastHttpContent;
+import io.netty.handler.stream.ChunkedStream;
+import org.apache.hadoop.io.IOUtils;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.concurrent.Future;
+
+/**
+ * Object Store Netty channel pipeline handler that handles inbound
+ * {@link HttpContent} fragments for the request body by writing the bytes into
+ * the pipe so that the application dispatch thread can read them.
+ * After receiving the {@link LastHttpContent}, this handler also flushes the
+ * response.
+ */
+public final class RequestContentObjectStoreChannelHandler
+    extends ObjectStoreChannelHandler<HttpContent> {
+
+  private final HttpRequest nettyReq;
+  private final Future<HttpResponse> nettyResp;
+  private final OutputStream reqOut;
+  private final InputStream respIn;
+  private ObjectStoreJerseyContainer jerseyContainer;
+
+  /**
+   * Creates a new RequestContentObjectStoreChannelHandler.
+   *
+   * @param nettyReq HTTP request
+   * @param nettyResp asynchronous HTTP response
+   * @param reqOut output stream for writing request body
+   * @param respIn input stream for reading response body
+   * @param jerseyContainer jerseyContainer to handle the request
+   */
+  public RequestContentObjectStoreChannelHandler(HttpRequest nettyReq,
+      Future<HttpResponse> nettyResp, OutputStream reqOut, InputStream respIn,
+      ObjectStoreJerseyContainer jerseyContainer) {
+    this.nettyReq = nettyReq;
+    this.nettyResp = nettyResp;
+    this.reqOut = reqOut;
+    this.respIn = respIn;
+    this.jerseyContainer = jerseyContainer;
+  }
+
+  @Override
+  public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
+    ctx.flush();
+  }
+
+  @Override
+  public void channelRead0(ChannelHandlerContext ctx, HttpContent content)
+      throws Exception {
+    LOG.trace(
+        "begin RequestContentObjectStoreChannelHandler channelRead0, " +
+        "ctx = {}, content = {}", ctx, content);
+    content.content().readBytes(this.reqOut, content.content().readableBytes());
+    if (content instanceof LastHttpContent) {
+      IOUtils.cleanupWithLogger(null, this.reqOut);
+      ctx.write(this.nettyResp.get());
+      ChannelFuture respFuture = ctx.writeAndFlush(new ChunkedStream(
+          this.respIn));
+      respFuture.addListener(new CloseableCleanupListener(this.respIn));
+      if (!HttpHeaders.isKeepAlive(this.nettyReq)) {
+        respFuture.addListener(ChannelFutureListener.CLOSE);
+      } else {
+        respFuture.addListener(new ChannelFutureListener() {
+          @Override
+          public void operationComplete(ChannelFuture future) throws Exception {
+            // Notify the client that this is the last content for the
+            // current request.
+            ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
+            // Reset the pipeline handler so that the next request can reuse
+            // the same connection.
+            RequestDispatchObjectStoreChannelHandler h =
+                new RequestDispatchObjectStoreChannelHandler(jerseyContainer);
+            ctx.pipeline().replace(ctx.pipeline().last(),
+                RequestDispatchObjectStoreChannelHandler.class.getSimpleName(),
+                h);
+          }
+        });
+      }
+    }
+    LOG.trace(
+        "end RequestContentObjectStoreChannelHandler channelRead0, " +
+        "ctx = {}, content = {}", ctx, content);
+  }
+
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+    super.exceptionCaught(ctx, cause);
+    IOUtils.cleanupWithLogger(null, this.reqOut, this.respIn);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestDispatchObjectStoreChannelHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestDispatchObjectStoreChannelHandler.java
new file mode 100644
index 0000000..add827a
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/RequestDispatchObjectStoreChannelHandler.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.netty;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.http.DefaultFullHttpResponse;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponse;
+import org.apache.hadoop.io.IOUtils;
+
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.util.concurrent.Future;
+
+import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
+import static io.netty.handler.codec.http.HttpResponseStatus.CONTINUE;
+import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
+
+/**
+ * Object Store Netty channel pipeline handler that handles an inbound
+ * {@link HttpRequest} by dispatching it to the Object Store Jersey container.
+ * The handler establishes two pairs of connected piped streams: one for inbound
+ * request handling and another for outbound response handling.  The relevant
+ * ends of these pipes are handed off to the Jersey application dispatch and the
+ * next channel handler, which is responsible for streaming in the inbound
+ * request body and flushing out the response body.
+ */
+public final class RequestDispatchObjectStoreChannelHandler
+    extends ObjectStoreChannelHandler<HttpRequest> {
+
+  private final ObjectStoreJerseyContainer jerseyContainer;
+
+  private PipedInputStream reqIn;
+  private PipedOutputStream reqOut;
+  private PipedInputStream respIn;
+  private PipedOutputStream respOut;
+
+  /**
+   * Creates a new RequestDispatchObjectStoreChannelHandler.
+   *
+   * @param jerseyContainer Object Store application Jersey container for
+   * request dispatch
+   */
+  public RequestDispatchObjectStoreChannelHandler(
+      ObjectStoreJerseyContainer jerseyContainer) {
+    this.jerseyContainer = jerseyContainer;
+  }
+
+  @Override
+  public void channelRead0(ChannelHandlerContext ctx, HttpRequest nettyReq)
+      throws Exception {
+    LOG.trace("begin RequestDispatchObjectStoreChannelHandler channelRead0, " +
+        "ctx = {}, nettyReq = {}", ctx, nettyReq);
+    if (!nettyReq.getDecoderResult().isSuccess()) {
+      sendErrorResponse(ctx, BAD_REQUEST);
+      return;
+    }
+
+    this.reqIn = new PipedInputStream();
+    this.reqOut = new PipedOutputStream(reqIn);
+    this.respIn = new PipedInputStream();
+    this.respOut = new PipedOutputStream(respIn);
+
+    if (HttpHeaders.is100ContinueExpected(nettyReq)) {
+      LOG.trace("Sending continue response.");
+      ctx.writeAndFlush(new DefaultFullHttpResponse(HTTP_1_1, CONTINUE));
+    }
+
+    Future<HttpResponse> nettyResp = this.jerseyContainer.dispatch(nettyReq,
+        reqIn, respOut);
+
+    ctx.pipeline().replace(this,
+        RequestContentObjectStoreChannelHandler.class.getSimpleName(),
+        new RequestContentObjectStoreChannelHandler(nettyReq, nettyResp,
+            reqOut, respIn, jerseyContainer));
+
+    LOG.trace("end RequestDispatchObjectStoreChannelHandler channelRead0, " +
+        "ctx = {}, nettyReq = {}", ctx, nettyReq);
+  }
+
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+    super.exceptionCaught(ctx, cause);
+    IOUtils.cleanupWithLogger(null, this.reqIn, this.reqOut, this.respIn,
+        this.respOut);
+  }
+}
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/package-info.java
new file mode 100644
index 0000000..f4aa675
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/netty/package-info.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Netty-based HTTP server implementation for Ozone.
+ */
+@InterfaceAudience.Private
+package org.apache.hadoop.ozone.web.netty;
+
+import org.apache.hadoop.classification.InterfaceAudience;
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/package-info.java
new file mode 100644
index 0000000..d93edf8
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Ozone JAX-RS REST interface related classes and handlers.
+ */
+package org.apache.hadoop.ozone.web;
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java
new file mode 100644
index 0000000..0f4a856
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/DistributedStorageHandler.java
@@ -0,0 +1,539 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.storage;
+
+import com.google.common.base.Strings;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.client.io.LengthInputStream;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.protocolPB
+    .KeySpaceManagerProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.OzoneConsts.Versioning;
+import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream;
+import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos;
+import org.apache.hadoop.ozone.protocolPB.KSMPBHelper;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.ozone.web.request.OzoneQuota;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.KeyArgs;
+import org.apache.hadoop.ozone.web.handlers.ListArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.response.ListVolumes;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.response.VolumeOwner;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.ozone.web.response.ListBuckets;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.response.ListKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+
+/**
+ * A {@link StorageHandler} implementation that distributes object storage
+ * across the nodes of an HDFS cluster.
+ */
+public final class DistributedStorageHandler implements StorageHandler {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DistributedStorageHandler.class);
+
+  private final StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private final KeySpaceManagerProtocolClientSideTranslatorPB
+      keySpaceManagerClient;
+  private final XceiverClientManager xceiverClientManager;
+  private final OzoneAcl.OzoneACLRights userRights;
+  private final OzoneAcl.OzoneACLRights groupRights;
+  private int chunkSize;
+  private final boolean useRatis;
+  private final HddsProtos.ReplicationType type;
+  private final HddsProtos.ReplicationFactor factor;
+
+  /**
+   * Creates a new DistributedStorageHandler.
+   *
+   * @param conf configuration
+   * @param storageContainerLocation StorageContainerLocationProtocol proxy
+   * @param keySpaceManagerClient KeySpaceManager proxy
+   */
+  public DistributedStorageHandler(OzoneConfiguration conf,
+      StorageContainerLocationProtocolClientSideTranslatorPB
+          storageContainerLocation,
+      KeySpaceManagerProtocolClientSideTranslatorPB
+          keySpaceManagerClient) {
+    this.keySpaceManagerClient = keySpaceManagerClient;
+    this.storageContainerLocationClient = storageContainerLocation;
+    this.xceiverClientManager = new XceiverClientManager(conf);
+    this.useRatis = conf.getBoolean(
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
+        ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT);
+
+    if(useRatis) {
+      type = HddsProtos.ReplicationType.RATIS;
+      factor = HddsProtos.ReplicationFactor.THREE;
+    } else {
+      type = HddsProtos.ReplicationType.STAND_ALONE;
+      factor = HddsProtos.ReplicationFactor.ONE;
+    }
+
+    chunkSize = conf.getInt(ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY,
+        ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_DEFAULT);
+    userRights = conf.getEnum(KSMConfigKeys.OZONE_KSM_USER_RIGHTS,
+        KSMConfigKeys.OZONE_KSM_USER_RIGHTS_DEFAULT);
+    groupRights = conf.getEnum(KSMConfigKeys.OZONE_KSM_GROUP_RIGHTS,
+        KSMConfigKeys.OZONE_KSM_GROUP_RIGHTS_DEFAULT);
+    if(chunkSize > ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE) {
+      LOG.warn("The chunk size ({}) is not allowed to be more than"
+              + " the maximum size ({}),"
+              + " resetting to the maximum size.",
+          chunkSize, ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE);
+      chunkSize = ScmConfigKeys.OZONE_SCM_CHUNK_MAX_SIZE;
+    }
+  }
+
+  @Override
+  public void createVolume(VolumeArgs args) throws IOException, OzoneException {
+    long quota = args.getQuota() == null ?
+        OzoneConsts.MAX_QUOTA_IN_BYTES : args.getQuota().sizeInBytes();
+    OzoneAcl userAcl =
+        new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+            args.getUserName(), userRights);
+    KsmVolumeArgs.Builder builder = KsmVolumeArgs.newBuilder();
+    builder.setAdminName(args.getAdminName())
+        .setOwnerName(args.getUserName())
+        .setVolume(args.getVolumeName())
+        .setQuotaInBytes(quota)
+        .addOzoneAcls(KSMPBHelper.convertOzoneAcl(userAcl));
+    if (args.getGroups() != null) {
+      for (String group : args.getGroups()) {
+        OzoneAcl groupAcl =
+            new OzoneAcl(OzoneAcl.OzoneACLType.GROUP, group, groupRights);
+        builder.addOzoneAcls(KSMPBHelper.convertOzoneAcl(groupAcl));
+      }
+    }
+    keySpaceManagerClient.createVolume(builder.build());
+  }
+
+  @Override
+  public void setVolumeOwner(VolumeArgs args) throws
+      IOException, OzoneException {
+    keySpaceManagerClient.setOwner(args.getVolumeName(), args.getUserName());
+  }
+
+  @Override
+  public void setVolumeQuota(VolumeArgs args, boolean remove)
+      throws IOException, OzoneException {
+    long quota = remove ? OzoneConsts.MAX_QUOTA_IN_BYTES :
+        args.getQuota().sizeInBytes();
+    keySpaceManagerClient.setQuota(args.getVolumeName(), quota);
+  }
+
+  @Override
+  public boolean checkVolumeAccess(String volume, OzoneAcl acl)
+      throws IOException, OzoneException {
+    return keySpaceManagerClient
+        .checkVolumeAccess(volume, KSMPBHelper.convertOzoneAcl(acl));
+  }
+
+  @Override
+  public ListVolumes listVolumes(ListArgs args)
+      throws IOException, OzoneException {
+    int maxNumOfKeys = args.getMaxKeys();
+    if (maxNumOfKeys <= 0 ||
+        maxNumOfKeys > OzoneConsts.MAX_LISTVOLUMES_SIZE) {
+      throw new IllegalArgumentException(
+          String.format("Illegal max number of keys specified,"
+                  + " the value must be in range (0, %d], actual : %d.",
+              OzoneConsts.MAX_LISTVOLUMES_SIZE, maxNumOfKeys));
+    }
+
+    List<KsmVolumeArgs> listResult;
+    if (args.isRootScan()) {
+      listResult = keySpaceManagerClient.listAllVolumes(args.getPrefix(),
+          args.getPrevKey(), args.getMaxKeys());
+    } else {
+      UserArgs userArgs = args.getArgs();
+      if (userArgs == null || userArgs.getUserName() == null) {
+        throw new IllegalArgumentException("Illegal argument,"
+            + " missing user argument.");
+      }
+      listResult = keySpaceManagerClient.listVolumeByUser(
+          args.getArgs().getUserName(), args.getPrefix(), args.getPrevKey(),
+          args.getMaxKeys());
+    }
+
+    // TODO Add missing fields createdBy, bucketCount and bytesUsed
+    ListVolumes result = new ListVolumes();
+    for (KsmVolumeArgs volumeArgs : listResult) {
+      VolumeInfo info = new VolumeInfo();
+      KeySpaceManagerProtocolProtos.VolumeInfo
+          infoProto = volumeArgs.getProtobuf();
+      info.setOwner(new VolumeOwner(infoProto.getOwnerName()));
+      info.setQuota(OzoneQuota.getOzoneQuota(infoProto.getQuotaInBytes()));
+      info.setVolumeName(infoProto.getVolume());
+      info.setCreatedOn(OzoneUtils.formatTime(infoProto.getCreationTime()));
+      result.addVolume(info);
+    }
+
+    return result;
+  }
+
+  @Override
+  public void deleteVolume(VolumeArgs args)
+      throws IOException, OzoneException {
+    keySpaceManagerClient.deleteVolume(args.getVolumeName());
+  }
+
+  @Override
+  public VolumeInfo getVolumeInfo(VolumeArgs args)
+      throws IOException, OzoneException {
+    KsmVolumeArgs volumeArgs =
+        keySpaceManagerClient.getVolumeInfo(args.getVolumeName());
+    //TODO: add support for createdOn and other fields in getVolumeInfo
+    VolumeInfo volInfo =
+        new VolumeInfo(volumeArgs.getVolume(), null,
+            volumeArgs.getAdminName());
+    volInfo.setOwner(new VolumeOwner(volumeArgs.getOwnerName()));
+    volInfo.setQuota(OzoneQuota.getOzoneQuota(volumeArgs.getQuotaInBytes()));
+    volInfo.setCreatedOn(OzoneUtils.formatTime(volumeArgs.getCreationTime()));
+    return volInfo;
+  }
+
+  @Override
+  public void createBucket(final BucketArgs args)
+      throws IOException, OzoneException {
+    KsmBucketInfo.Builder builder = KsmBucketInfo.newBuilder();
+    builder.setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName());
+    if(args.getAddAcls() != null) {
+      builder.setAcls(args.getAddAcls());
+    }
+    if(args.getStorageType() != null) {
+      builder.setStorageType(args.getStorageType());
+    }
+    if(args.getVersioning() != null) {
+      builder.setIsVersionEnabled(getBucketVersioningProtobuf(
+          args.getVersioning()));
+    }
+    keySpaceManagerClient.createBucket(builder.build());
+  }
+
+  /**
+   * Converts the OzoneConsts.Versioning enum to a boolean.
+   *
+   * @param version - versioning value from the request
+   * @return corresponding boolean value
+   */
+  private boolean getBucketVersioningProtobuf(
+      Versioning version) {
+    if(version != null) {
+      switch(version) {
+      case ENABLED:
+        return true;
+      case NOT_DEFINED:
+      case DISABLED:
+      default:
+        return false;
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public void setBucketAcls(BucketArgs args)
+      throws IOException, OzoneException {
+    List<OzoneAcl> removeAcls = args.getRemoveAcls();
+    List<OzoneAcl> addAcls = args.getAddAcls();
+    if(removeAcls != null || addAcls != null) {
+      KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+      builder.setVolumeName(args.getVolumeName())
+          .setBucketName(args.getBucketName());
+      if(removeAcls != null && !removeAcls.isEmpty()) {
+        builder.setRemoveAcls(args.getRemoveAcls());
+      }
+      if(addAcls != null && !addAcls.isEmpty()) {
+        builder.setAddAcls(args.getAddAcls());
+      }
+      keySpaceManagerClient.setBucketProperty(builder.build());
+    }
+  }
+
+  @Override
+  public void setBucketVersioning(BucketArgs args)
+      throws IOException, OzoneException {
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setIsVersionEnabled(getBucketVersioningProtobuf(
+            args.getVersioning()));
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void setBucketStorageClass(BucketArgs args)
+      throws IOException, OzoneException {
+    KsmBucketArgs.Builder builder = KsmBucketArgs.newBuilder();
+    builder.setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setStorageType(args.getStorageType());
+    keySpaceManagerClient.setBucketProperty(builder.build());
+  }
+
+  @Override
+  public void deleteBucket(BucketArgs args)
+      throws IOException, OzoneException {
+    keySpaceManagerClient.deleteBucket(args.getVolumeName(),
+        args.getBucketName());
+  }
+
+  @Override
+  public void checkBucketAccess(BucketArgs args)
+      throws IOException, OzoneException {
+    throw new UnsupportedOperationException(
+        "checkBucketAccess not implemented");
+  }
+
+  @Override
+  public ListBuckets listBuckets(ListArgs args)
+      throws IOException, OzoneException {
+    ListBuckets result = new ListBuckets();
+    UserArgs userArgs = args.getArgs();
+    if (userArgs instanceof VolumeArgs) {
+      VolumeArgs va = (VolumeArgs) userArgs;
+      if (Strings.isNullOrEmpty(va.getVolumeName())) {
+        throw new IllegalArgumentException("Illegal argument,"
+            + " volume name cannot be null or empty.");
+      }
+
+      int maxNumOfKeys = args.getMaxKeys();
+      if (maxNumOfKeys <= 0 ||
+          maxNumOfKeys > OzoneConsts.MAX_LISTBUCKETS_SIZE) {
+        throw new IllegalArgumentException(
+            String.format("Illegal max number of keys specified,"
+                + " the value must be in range (0, %d], actual : %d.",
+                OzoneConsts.MAX_LISTBUCKETS_SIZE, maxNumOfKeys));
+      }
+
+      List<KsmBucketInfo> buckets =
+          keySpaceManagerClient.listBuckets(va.getVolumeName(),
+              args.getPrevKey(), args.getPrefix(), args.getMaxKeys());
+
+      // Convert the result for the web layer.
+      for (KsmBucketInfo bucketInfo : buckets) {
+        BucketInfo bk = new BucketInfo();
+        bk.setVolumeName(bucketInfo.getVolumeName());
+        bk.setBucketName(bucketInfo.getBucketName());
+        bk.setStorageType(bucketInfo.getStorageType());
+        bk.setAcls(bucketInfo.getAcls());
+        bk.setCreatedOn(OzoneUtils.formatTime(bucketInfo.getCreationTime()));
+        result.addBucket(bk);
+      }
+      return result;
+    } else {
+      throw new IllegalArgumentException("Illegal argument provided,"
+          + " expecting VolumeArgs type but met "
+          + userArgs.getClass().getSimpleName());
+    }
+  }
+
+  @Override
+  public BucketInfo getBucketInfo(BucketArgs args)
+      throws IOException {
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    KsmBucketInfo ksmBucketInfo = keySpaceManagerClient.getBucketInfo(
+        volumeName, bucketName);
+    BucketInfo bucketInfo = new BucketInfo(ksmBucketInfo.getVolumeName(),
+        ksmBucketInfo.getBucketName());
+    if(ksmBucketInfo.getIsVersionEnabled()) {
+      bucketInfo.setVersioning(Versioning.ENABLED);
+    } else {
+      bucketInfo.setVersioning(Versioning.DISABLED);
+    }
+    bucketInfo.setStorageType(ksmBucketInfo.getStorageType());
+    bucketInfo.setAcls(ksmBucketInfo.getAcls());
+    bucketInfo.setCreatedOn(
+        OzoneUtils.formatTime(ksmBucketInfo.getCreationTime()));
+    return bucketInfo;
+  }
+
+  @Override
+  public OutputStream newKeyWriter(KeyArgs args) throws IOException,
+      OzoneException {
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .setDataSize(args.getSize())
+        .setType(xceiverClientManager.getType())
+        .setFactor(xceiverClientManager.getFactor())
+        .build();
+    // contact KSM to allocate a block for key.
+    OpenKeySession openKey = keySpaceManagerClient.openKey(keyArgs);
+    ChunkGroupOutputStream groupOutputStream =
+        new ChunkGroupOutputStream.Builder()
+            .setHandler(openKey)
+            .setXceiverClientManager(xceiverClientManager)
+            .setScmClient(storageContainerLocationClient)
+            .setKsmClient(keySpaceManagerClient)
+            .setChunkSize(chunkSize)
+            .setRequestID(args.getRequestID())
+            .setType(xceiverClientManager.getType())
+            .setFactor(xceiverClientManager.getFactor())
+            .build();
+    groupOutputStream.addPreallocateBlocks(
+        openKey.getKeyInfo().getLatestVersionLocations(),
+        openKey.getOpenVersion());
+    return new OzoneOutputStream(groupOutputStream);
+  }
+
+  @Override
+  public void commitKey(KeyArgs args, OutputStream stream) throws
+      IOException, OzoneException {
+    stream.close();
+  }
+
+  @Override
+  public LengthInputStream newKeyReader(KeyArgs args) throws IOException,
+      OzoneException {
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .setDataSize(args.getSize())
+        .build();
+    KsmKeyInfo keyInfo = keySpaceManagerClient.lookupKey(keyArgs);
+    return ChunkGroupInputStream.getFromKsmKeyInfo(
+        keyInfo, xceiverClientManager, storageContainerLocationClient,
+        args.getRequestID());
+  }
+
+  @Override
+  public void deleteKey(KeyArgs args) throws IOException, OzoneException {
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .build();
+    keySpaceManagerClient.deleteKey(keyArgs);
+  }
+
+  @Override
+  public KeyInfo getKeyInfo(KeyArgs args) throws IOException, OzoneException {
+    KsmKeyArgs keyArgs = new KsmKeyArgs.Builder()
+        .setVolumeName(args.getVolumeName())
+        .setBucketName(args.getBucketName())
+        .setKeyName(args.getKeyName())
+        .build();
+
+    KsmKeyInfo ksmKeyInfo = keySpaceManagerClient.lookupKey(keyArgs);
+    KeyInfo keyInfo = new KeyInfo();
+    keyInfo.setVersion(0);
+    keyInfo.setKeyName(ksmKeyInfo.getKeyName());
+    keyInfo.setSize(ksmKeyInfo.getDataSize());
+    keyInfo.setCreatedOn(
+        OzoneUtils.formatTime(ksmKeyInfo.getCreationTime()));
+    keyInfo.setModifiedOn(
+        OzoneUtils.formatTime(ksmKeyInfo.getModificationTime()));
+    return keyInfo;
+  }
+
+  @Override
+  public ListKeys listKeys(ListArgs args) throws IOException, OzoneException {
+    ListKeys result = new ListKeys();
+    UserArgs userArgs = args.getArgs();
+    if (userArgs instanceof BucketArgs) {
+      BucketArgs bucketArgs = (BucketArgs) userArgs;
+      if (Strings.isNullOrEmpty(bucketArgs.getVolumeName())) {
+        throw new IllegalArgumentException("Illegal argument,"
+            + " volume name cannot be null or empty.");
+      }
+
+      if (Strings.isNullOrEmpty(bucketArgs.getBucketName())) {
+        throw new IllegalArgumentException("Illegal argument,"
+            + " bucket name cannot be null or empty.");
+      }
+
+      int maxNumOfKeys = args.getMaxKeys();
+      if (maxNumOfKeys <= 0 ||
+          maxNumOfKeys > OzoneConsts.MAX_LISTKEYS_SIZE) {
+        throw new IllegalArgumentException(
+            String.format("Illegal max number of keys specified,"
+                + " the value must be in range (0, %d], actual : %d.",
+                OzoneConsts.MAX_LISTKEYS_SIZE, maxNumOfKeys));
+      }
+
+      List<KsmKeyInfo> keys =
+          keySpaceManagerClient.listKeys(bucketArgs.getVolumeName(),
+              bucketArgs.getBucketName(),
+              args.getPrevKey(), args.getPrefix(), args.getMaxKeys());
+
+      // Convert the result for the web layer.
+      for (KsmKeyInfo info : keys) {
+        KeyInfo tempInfo = new KeyInfo();
+        tempInfo.setVersion(0);
+        tempInfo.setKeyName(info.getKeyName());
+        tempInfo.setSize(info.getDataSize());
+        tempInfo.setCreatedOn(
+            OzoneUtils.formatTime(info.getCreationTime()));
+        tempInfo.setModifiedOn(
+            OzoneUtils.formatTime(info.getModificationTime()));
+
+        result.addKey(tempInfo);
+      }
+      return result;
+    } else {
+      throw new IllegalArgumentException("Illegal argument provided,"
+          + " expecting BucketArgs type but met "
+          + userArgs.getClass().getSimpleName());
+    }
+  }
+
+  /**
+   * Closes DistributedStorageHandler.
+   */
+  @Override
+  public void close() {
+    IOUtils.cleanupWithLogger(LOG, xceiverClientManager);
+    IOUtils.cleanupWithLogger(LOG, keySpaceManagerClient);
+    IOUtils.cleanupWithLogger(LOG, storageContainerLocationClient);
+  }
+}
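
For orientation, a minimal sketch of how a caller might drive the key read/write path above; 'storageHandler' is a configured DistributedStorageHandler and 'keyArgs' a KeyArgs instance for the target volume/bucket/key (construction omitted), so treat this as illustrative rather than the REST handlers' actual wiring.

    // Illustrative only: write a small payload through newKeyWriter/commitKey,
    // then read it back via newKeyReader. Imports and error handling elided;
    // keyArgs is assumed to already carry the payload size via its size field.
    byte[] data = "hello ozone".getBytes(java.nio.charset.StandardCharsets.UTF_8);
    OutputStream out = storageHandler.newKeyWriter(keyArgs);
    out.write(data);
    storageHandler.commitKey(keyArgs, out);   // commitKey simply closes the stream

    try (InputStream in = storageHandler.newKeyReader(keyArgs)) {
      org.apache.hadoop.io.IOUtils.copyBytes(in, System.out, 4096);
    }
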
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/package-info.java
new file mode 100644
index 0000000..f5499f5
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/storage/package-info.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Ozone storage handler implementation integrating REST interface front-end
+ * with container data pipeline back-end.
+ */
+@InterfaceAudience.Private
+package org.apache.hadoop.ozone.web.storage;
+
+import org.apache.hadoop.classification.InterfaceAudience;
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/Simple.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/Simple.java
new file mode 100644
index 0000000..397c80f
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/Simple.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.userauth;
+
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.client.rest.headers.Header;
+import org.apache.hadoop.ozone.web.interfaces.UserAuth;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import javax.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Simple is a UserAuth implementation used in the insecure
+ * mode of Ozone. It maps more or less to the simple authentication
+ * scheme in HDFS.
+ */
+@InterfaceAudience.Private
+public class Simple implements UserAuth {
+  /**
+   * Returns the x-ozone-user header value, that is, the user on whose
+   * behalf the request is made. This is used in the volume creation path.
+   *
+   * @param userArgs - UserArgs
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public String getOzoneUser(UserArgs userArgs) throws OzoneException {
+    assert userArgs != null : "userArgs cannot be null";
+
+    HttpHeaders headers = userArgs.getHeaders();
+    List<String> users = headers.getRequestHeader(Header.OZONE_USER);
+
+    if ((users == null) || (users.size() == 0)) {
+      return null;
+    }
+    if (users.size() > 1) {
+      throw ErrorTable.newError(ErrorTable.BAD_AUTHORIZATION, userArgs);
+    }
+    return users.get(0).toLowerCase().trim();
+  }
+
+  /**
+   * Returns the user name as a string from the URI and HTTP headers.
+   *
+   * @param userArgs - user args
+   *
+   * @throws OzoneException -- allows the underlying system to throw;
+   * that error is propagated to clients
+   */
+  @Override
+  public String getUser(UserArgs userArgs) throws OzoneException {
+    assert userArgs != null : "userArgs cannot be null";
+
+    HttpHeaders headers = userArgs.getHeaders();
+    List<String> users = headers.getRequestHeader(HttpHeaders.AUTHORIZATION);
+    if (users == null || users.size() > 1) {
+      throw ErrorTable.newError(ErrorTable.BAD_AUTHORIZATION, userArgs);
+    }
+
+    if (users.size() == 0) {
+      return null;
+    }
+
+    String user = users.get(0).trim();
+    if (user.startsWith(Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME)) {
+      user = user.replace(Header.OZONE_SIMPLE_AUTHENTICATION_SCHEME, "");
+      return user.toLowerCase().trim();
+    } else {
+      throw ErrorTable.newError(ErrorTable.BAD_AUTHORIZATION, userArgs);
+    }
+  }
+
+
+  /**
+   * Returns true if a user is an admin; root and hdfs are treated as admins.
+   *
+   * @param userArgs - User Args
+   *
+   * @throws OzoneException -- allows the underlying system to throw;
+   * that error is propagated to clients
+   */
+  @Override
+  public boolean isAdmin(UserArgs userArgs) throws OzoneException {
+    assert userArgs != null : "userArgs cannot be null";
+
+    String user;
+    String currentUser;
+    try {
+      user = getUser(userArgs);
+      currentUser = UserGroupInformation.getCurrentUser().getShortUserName();
+    } catch (IOException e) {
+      throw ErrorTable.newError(ErrorTable.BAD_AUTHORIZATION, userArgs);
+    }
+    return
+        (user.compareToIgnoreCase(OzoneConsts.OZONE_SIMPLE_ROOT_USER) == 0) ||
+            (user.compareToIgnoreCase(OzoneConsts.OZONE_SIMPLE_HDFS_USER) == 0)
+            || (user.compareToIgnoreCase(currentUser) == 0);
+  }
+
+  /**
+   * Returns true if the request is Anonymous.
+   *
+   * @param userArgs - user Args
+   *
+   * @throws OzoneException -- allows the underlying system to throw;
+   * that error is propagated to clients
+   */
+  @Override
+  public boolean isAnonymous(UserArgs userArgs) throws OzoneException {
+    assert userArgs != null : "userArgs cannot be null";
+
+    return getUser(userArgs) == null;
+  }
+
+  /**
+   * Returns true if the name is a recognizable user in the system.
+   *
+   * @param userName - Name of the user
+   * @param userArgs - user Args
+   *
+   * @throws OzoneException -- allows the underlying system to throw;
+   * that error is propagated to clients
+   */
+  @Override
+  public boolean isUser(String userName, UserArgs userArgs)
+      throws OzoneException {
+    // In the simple case, all non-null user names are users :)
+    return userName != null;
+  }
+
+  /**
+   * Returns all the groups that the user is a member of.
+   *
+   * @param userArgs - User Args
+   *
+   * @return String Array which contains 0 or more group names
+   *
+   * @throws OzoneException
+   */
+  @Override
+  public String[] getGroups(UserArgs userArgs) throws OzoneException {
+    // Not implemented
+    return null;
+  }
+
+}
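
A test-style sketch of the contract implemented by Simple; the UserArgs instances are assumed to be stubbed so that one returns an empty Authorization header list and the other a simple-scheme credential, so this is illustrative rather than an actual test from the patch.

    // Hypothetical check of Simple's behaviour; argsWithoutAuth / argsWithAuth
    // are UserArgs whose HttpHeaders are stubbed accordingly.
    UserAuth auth = new Simple();
    assert auth.isAnonymous(argsWithoutAuth);       // empty Authorization header list -> anonymous
    assert auth.isUser("alice", argsWithoutAuth);   // any non-null name counts as a user
    String user = auth.getUser(argsWithAuth);       // scheme prefix stripped, lowercased
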
diff --git a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/package-info.java b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/package-info.java
new file mode 100644
index 0000000..d498fc8
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/userauth/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * User authentication handlers for the Ozone REST interface.
+ */
+package org.apache.hadoop.ozone.web.userauth;
\ No newline at end of file
diff --git a/hadoop-ozone/objectstore-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider b/hadoop-ozone/objectstore-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider
new file mode 100644
index 0000000..2e103fe
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/main/resources/META-INF/services/com.sun.jersey.spi.container.ContainerProvider
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.ozone.web.netty.ObjectStoreJerseyContainerProvider
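
This file follows the standard META-INF/services pattern: the framework scans the classpath for a file named after the SPI interface and instantiates every class listed in it. The JDK's ServiceLoader uses the same layout; the sketch below illustrates the generic mechanism with a hypothetical provider interface (Jersey 1.x performs its own, equivalent scan for ContainerProvider entries).

    // Generic META-INF/services lookup, shown with java.util.ServiceLoader and a
    // made-up provider interface; not Jersey's internal discovery code.
    import java.util.ServiceLoader;

    public final class SpiLookupDemo {
      /** Hypothetical SPI interface; Jersey's real one is ContainerProvider. */
      public interface MyProvider { }

      public static void main(String[] args) {
        for (MyProvider p : ServiceLoader.load(MyProvider.class)) {
          System.out.println("found provider: " + p.getClass().getName());
        }
      }
    }
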
diff --git a/hadoop-ozone/objectstore-service/src/test/java/org/apache/hadoop/ozone/TestErrorCode.java b/hadoop-ozone/objectstore-service/src/test/java/org/apache/hadoop/ozone/TestErrorCode.java
new file mode 100644
index 0000000..abb61bb
--- /dev/null
+++ b/hadoop-ozone/objectstore-service/src/test/java/org/apache/hadoop/ozone/TestErrorCode.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+import org.apache.hadoop.ozone.web.exceptions.ErrorTable;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.junit.Test;
+
+import static junit.framework.TestCase.assertEquals;
+import static org.apache.hadoop.ozone.web.utils.OzoneUtils.getRequestID;
+
+/**
+ * Test Ozone Error Codes.
+ */
+public class TestErrorCode {
+  /**
+   * Test Error Generator functions.
+   */
+  @Test
+  public void testErrorGen() {
+    OzoneException e = ErrorTable
+        .newError(ErrorTable.ACCESS_DENIED, getRequestID(), "/test/path",
+                  "localhost");
+    assertEquals(e.getHostID(), "localhost");
+    assertEquals(e.getShortMessage(),
+                 ErrorTable.ACCESS_DENIED.getShortMessage());
+  }
+
+  @Test
+  public void testErrorGenWithException() {
+    OzoneException e =
+        new OzoneException(ErrorTable.ACCESS_DENIED.getHttpCode(),
+                           "short message", new Exception("Hello"));
+    assertEquals("short message", e.getShortMessage());
+    assertEquals("Hello", e.getMessage());
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/pom.xml b/hadoop-ozone/ozone-manager/pom.xml
new file mode 100644
index 0000000..7f61129
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/pom.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-ozone-manager</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone OM server</description>
+  <name>Apache Hadoop Ozone OM server</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>ozone</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk16</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>2.2.0</version>
+      <scope>test</scope>
+    </dependency>
+
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy web resources</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <copy toDir="${project.build.directory}/webapps">
+                  <fileset dir="${basedir}/src/main/webapps">
+                    <exclude name="**/proto-web.xml"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-common-html</id>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>unpack</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <artifactItems>
+            <artifactItem>
+              <groupId>org.apache.hadoop</groupId>
+              <artifactId>hadoop-hdds-server-framework</artifactId>
+              <outputDirectory>${project.build.directory}/</outputDirectory>
+              <includes>webapps/static/**/*.*</includes>
+            </artifactItem>
+          </artifactItems>
+          <overWriteSnapshots>true</overWriteSnapshots>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManager.java
new file mode 100644
index 0000000..6c756913
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManager.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * BucketManager handles all the bucket level operations.
+ */
+public interface BucketManager {
+  /**
+   * Creates a bucket.
+   * @param bucketInfo - KsmBucketInfo for creating bucket.
+   */
+  void createBucket(KsmBucketInfo bucketInfo) throws IOException;
+  /**
+   * Returns Bucket Information.
+   * @param volumeName - Name of the Volume.
+   * @param bucketName - Name of the Bucket.
+   */
+  KsmBucketInfo getBucketInfo(String volumeName, String bucketName)
+      throws IOException;
+
+  /**
+   * Sets bucket property from args.
+   * @param args - BucketArgs.
+   * @throws IOException
+   */
+  void setBucketProperty(KsmBucketArgs args) throws IOException;
+
+  /**
+   * Deletes an existing empty bucket from volume.
+   * @param volumeName - Name of the volume.
+   * @param bucketName - Name of the bucket.
+   * @throws IOException
+   */
+  void deleteBucket(String volumeName, String bucketName) throws IOException;
+
+  /**
+   * Returns a list of buckets represented by {@link KsmBucketInfo}
+   * in the given volume.
+   *
+   * @param volumeName
+   *   Required parameter volume name determines buckets in which volume
+   *   to return.
+   * @param startBucket
+   *   Optional start bucket name indicating where to start
+   *   the bucket listing from; this bucket is excluded from the result.
+   * @param bucketPrefix
+   *   Optional prefix parameter, restricting the response to buckets
+   *   whose names begin with the specified prefix.
+   * @param maxNumOfBuckets
+   *   The maximum number of buckets to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of buckets.
+   * @throws IOException
+   */
+  List<KsmBucketInfo> listBuckets(String volumeName,
+      String startBucket, String bucketPrefix, int maxNumOfBuckets)
+      throws IOException;
+}
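
A minimal usage sketch of this interface, assuming a BucketManager instance (for example BucketManagerImpl below) and an already existing parent volume; the names are placeholders and ACLs are omitted for brevity.

    // Illustrative only; 'bucketManager' is a BucketManager backed by KSM metadata.
    KsmBucketInfo info = KsmBucketInfo.newBuilder()
        .setVolumeName("volume-a")
        .setBucketName("bucket-b")
        .setStorageType(StorageType.DISK)     // org.apache.hadoop.fs.StorageType
        .setIsVersionEnabled(false)
        .build();
    bucketManager.createBucket(info);

    // Page through the volume's buckets, at most 100 per call.
    List<KsmBucketInfo> page =
        bucketManager.listBuckets("volume-a", null, null, 100);
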
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManagerImpl.java
new file mode 100644
index 0000000..957a6d9
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/BucketManagerImpl.java
@@ -0,0 +1,315 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.BucketInfo;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.apache.hadoop.util.Time;
+import org.iq80.leveldb.DBException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * KSM bucket manager.
+ */
+public class BucketManagerImpl implements BucketManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BucketManagerImpl.class);
+
+  /**
+   * KSMMetadataManager is used for accessing KSM MetadataDB and ReadWriteLock.
+   */
+  private final KSMMetadataManager metadataManager;
+
+  /**
+   * Constructs BucketManager.
+   * @param metadataManager
+   */
+  public BucketManagerImpl(KSMMetadataManager metadataManager){
+    this.metadataManager = metadataManager;
+  }
+
+  /**
+   * MetadataDB is maintained in MetadataManager and shared between
+   * BucketManager, VolumeManager, and KeyManager.
+   *
+   * BucketManager uses MetadataDB to store bucket level information.
+   *
+   * Keys used in BucketManager for storing data into MetadataDB
+   * for BucketInfo:
+   * {volume/bucket} -> bucketInfo
+   *
+   * Workflow of create bucket:
+   *
+   * -> Check if the Volume exists in metadataDB; if not, throw
+   * VolumeNotFoundException.
+   * -> Else check if the Bucket exists in metadataDB; if it does, throw
+   * BucketExistException.
+   * -> Else update MetadataDB with the new BucketInfo.
+   */
+
+  /**
+   * Creates a bucket.
+   * @param bucketInfo - KsmBucketInfo.
+   */
+  @Override
+  public void createBucket(KsmBucketInfo bucketInfo) throws IOException {
+    Preconditions.checkNotNull(bucketInfo);
+    metadataManager.writeLock().lock();
+    String volumeName = bucketInfo.getVolumeName();
+    String bucketName = bucketInfo.getBucketName();
+    try {
+      byte[] volumeKey = metadataManager.getVolumeKey(volumeName);
+      byte[] bucketKey = metadataManager.getBucketKey(volumeName, bucketName);
+
+      //Check if the volume exists
+      if (metadataManager.get(volumeKey) == null) {
+        LOG.debug("volume: {} not found ", volumeName);
+        throw new KSMException("Volume doesn't exist",
+            KSMException.ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+      //Check if bucket already exists
+      if (metadataManager.get(bucketKey) != null) {
+        LOG.debug("bucket: {} already exists ", bucketName);
+        throw new KSMException("Bucket already exist",
+            KSMException.ResultCodes.FAILED_BUCKET_ALREADY_EXISTS);
+      }
+
+      KsmBucketInfo ksmBucketInfo = KsmBucketInfo.newBuilder()
+          .setVolumeName(bucketInfo.getVolumeName())
+          .setBucketName(bucketInfo.getBucketName())
+          .setAcls(bucketInfo.getAcls())
+          .setStorageType(bucketInfo.getStorageType())
+          .setIsVersionEnabled(bucketInfo.getIsVersionEnabled())
+          .setCreationTime(Time.now())
+          .build();
+      metadataManager.put(bucketKey, ksmBucketInfo.getProtobuf().toByteArray());
+
+      LOG.debug("created bucket: {} in volume: {}", bucketName, volumeName);
+    } catch (IOException | DBException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Bucket creation failed for bucket:{} in volume:{}",
+            bucketName, volumeName, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Returns Bucket Information.
+   *
+   * @param volumeName - Name of the Volume.
+   * @param bucketName - Name of the Bucket.
+   */
+  @Override
+  public KsmBucketInfo getBucketInfo(String volumeName, String bucketName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    metadataManager.readLock().lock();
+    try {
+      byte[] bucketKey = metadataManager.getBucketKey(volumeName, bucketName);
+      byte[] value = metadataManager.get(bucketKey);
+      if (value == null) {
+        LOG.debug("bucket: {} not found in volume: {}.", bucketName,
+            volumeName);
+        throw new KSMException("Bucket not found",
+            KSMException.ResultCodes.FAILED_BUCKET_NOT_FOUND);
+      }
+      return KsmBucketInfo.getFromProtobuf(BucketInfo.parseFrom(value));
+    } catch (IOException | DBException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Exception while getting bucket info for bucket: {}",
+            bucketName, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  /**
+   * Sets bucket property from args.
+   * @param args - BucketArgs.
+   * @throws IOException
+   */
+  @Override
+  public void setBucketProperty(KsmBucketArgs args) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    try {
+      byte[] bucketKey = metadataManager.getBucketKey(volumeName, bucketName);
+      //Check if volume exists
+      if(metadataManager.get(metadataManager.getVolumeKey(volumeName)) ==
+          null) {
+        LOG.debug("volume: {} not found ", volumeName);
+        throw new KSMException("Volume doesn't exist",
+            KSMException.ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+      byte[] value = metadataManager.get(bucketKey);
+      //Check if bucket exists
+      if(value == null) {
+        LOG.debug("bucket: {} not found ", bucketName);
+        throw new KSMException("Bucket doesn't exist",
+            KSMException.ResultCodes.FAILED_BUCKET_NOT_FOUND);
+      }
+      KsmBucketInfo oldBucketInfo = KsmBucketInfo.getFromProtobuf(
+          BucketInfo.parseFrom(value));
+      KsmBucketInfo.Builder bucketInfoBuilder = KsmBucketInfo.newBuilder();
+      bucketInfoBuilder.setVolumeName(oldBucketInfo.getVolumeName())
+          .setBucketName(oldBucketInfo.getBucketName());
+
+      //Check ACLs to update
+      if(args.getAddAcls() != null || args.getRemoveAcls() != null) {
+        bucketInfoBuilder.setAcls(getUpdatedAclList(oldBucketInfo.getAcls(),
+            args.getRemoveAcls(), args.getAddAcls()));
+        LOG.debug("Updating ACLs for bucket: {} in volume: {}",
+            bucketName, volumeName);
+      } else {
+        bucketInfoBuilder.setAcls(oldBucketInfo.getAcls());
+      }
+
+      //Check StorageType to update
+      StorageType storageType = args.getStorageType();
+      if (storageType != null) {
+        bucketInfoBuilder.setStorageType(storageType);
+        LOG.debug("Updating bucket storage type for bucket: {} in volume: {}",
+            bucketName, volumeName);
+      } else {
+        bucketInfoBuilder.setStorageType(oldBucketInfo.getStorageType());
+      }
+
+      //Check Versioning to update
+      Boolean versioning = args.getIsVersionEnabled();
+      if (versioning != null) {
+        bucketInfoBuilder.setIsVersionEnabled(versioning);
+        LOG.debug("Updating bucket versioning for bucket: {} in volume: {}",
+            bucketName, volumeName);
+      } else {
+        bucketInfoBuilder
+            .setIsVersionEnabled(oldBucketInfo.getIsVersionEnabled());
+      }
+      bucketInfoBuilder.setCreationTime(oldBucketInfo.getCreationTime());
+
+      metadataManager.put(bucketKey,
+          bucketInfoBuilder.build().getProtobuf().toByteArray());
+    } catch (IOException | DBException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Setting bucket property failed for bucket:{} in volume:{}",
+            bucketName, volumeName, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Updates the existing ACL list with remove and add ACLs that are passed.
+   * Remove is done before Add.
+   *
+   * @param existingAcls - old ACL list.
+   * @param removeAcls - ACLs to be removed.
+   * @param addAcls - ACLs to be added.
+   * @return updated ACL list.
+   */
+  private List<OzoneAcl> getUpdatedAclList(List<OzoneAcl> existingAcls,
+      List<OzoneAcl> removeAcls, List<OzoneAcl> addAcls) {
+    if(removeAcls != null && !removeAcls.isEmpty()) {
+      existingAcls.removeAll(removeAcls);
+    }
+    if(addAcls != null && !addAcls.isEmpty()) {
+      addAcls.stream().filter(acl -> !existingAcls.contains(acl)).forEach(
+          existingAcls::add);
+    }
+    return existingAcls;
+  }
+
+  /**
+   * Deletes an existing empty bucket from volume.
+   * @param volumeName - Name of the volume.
+   * @param bucketName - Name of the bucket.
+   * @throws IOException
+   */
+  public void deleteBucket(String volumeName, String bucketName)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+    metadataManager.writeLock().lock();
+    try {
+      byte[] bucketKey = metadataManager.getBucketKey(volumeName, bucketName);
+      //Check if volume exists
+      if (metadataManager.get(metadataManager.getVolumeKey(volumeName))
+          == null) {
+        LOG.debug("volume: {} not found ", volumeName);
+        throw new KSMException("Volume doesn't exist",
+            KSMException.ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+      //Check if bucket exists
+      if (metadataManager.get(bucketKey) == null) {
+        LOG.debug("bucket: {} not found ", bucketName);
+        throw new KSMException("Bucket doesn't exist",
+            KSMException.ResultCodes.FAILED_BUCKET_NOT_FOUND);
+      }
+      //Check if bucket is empty
+      if (!metadataManager.isBucketEmpty(volumeName, bucketName)) {
+        LOG.debug("bucket: {} is not empty ", bucketName);
+        throw new KSMException("Bucket is not empty",
+            KSMException.ResultCodes.FAILED_BUCKET_NOT_EMPTY);
+      }
+      metadataManager.delete(bucketKey);
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Delete bucket failed for bucket:{} in volume:{}", bucketName,
+            volumeName, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<KsmBucketInfo> listBuckets(String volumeName,
+      String startBucket, String bucketPrefix, int maxNumOfBuckets)
+      throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    metadataManager.readLock().lock();
+    try {
+      return metadataManager.listBuckets(
+          volumeName, startBucket, bucketPrefix, maxNumOfBuckets);
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+}
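
The ACL handling in setBucketProperty follows remove-before-add semantics and skips entries already present; the standalone sketch below reproduces that merge with plain java.util lists, independent of the KSM types.

    // Standalone illustration of the remove-then-add ACL merge used above.
    List<String> existing = new ArrayList<>(Arrays.asList("user:alice:rw", "user:bob:r"));
    List<String> remove = Arrays.asList("user:bob:r");
    List<String> add = Arrays.asList("group:dev:rw", "user:alice:rw"); // duplicate skipped

    existing.removeAll(remove);
    add.stream().filter(acl -> !existing.contains(acl)).forEach(existing::add);
    // existing is now ["user:alice:rw", "group:dev:rw"]
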
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java
new file mode 100644
index 0000000..bf22332
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMXBean.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.server.ServiceRuntimeInfo;
+
+/**
+ * This is the JMX management interface for KSM information.
+ */
+@InterfaceAudience.Private
+public interface KSMMXBean extends ServiceRuntimeInfo {
+
+  String getRpcPort();
+}
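
A hedged sketch of how such an MXBean is typically published, using Hadoop's MBeans helper; the actual registration point for KSMMXBean lives in the KSM server class and the names used here are assumptions.

    // Illustrative registration of a KSMMXBean implementation via
    // org.apache.hadoop.metrics2.util.MBeans; imports (javax.management.ObjectName)
    // elided and 'ksmMxBeanImpl' is any object implementing KSMMXBean.
    ObjectName ksmInfoBeanName =
        MBeans.register("KeySpaceManager", "KeySpaceManagerInfo", ksmMxBeanImpl);
    // ... on shutdown:
    MBeans.unregister(ksmInfoBeanName);
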
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManager.java
new file mode 100644
index 0000000..f5a2d5b
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManager.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataStore;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.locks.Lock;
+
+/**
+ * KSM metadata manager interface.
+ */
+public interface KSMMetadataManager {
+  /**
+   * Start metadata manager.
+   */
+  void start();
+
+  /**
+   * Stop metadata manager.
+   */
+  void stop() throws IOException;
+
+  /**
+   * Get metadata store.
+   * @return metadata store.
+   */
+  @VisibleForTesting
+  MetadataStore getStore();
+
+  /**
+   * Returns the read lock used on Metadata DB.
+   * @return readLock
+   */
+  Lock readLock();
+
+  /**
+   * Returns the write lock used on Metadata DB.
+   * @return writeLock
+   */
+  Lock writeLock();
+
+  /**
+   * Returns the value associated with this key.
+   * @param key - key
+   * @return value
+   */
+  byte[] get(byte[] key) throws IOException;
+
+  /**
+   * Puts a Key into Metadata DB.
+   * @param key   - key
+   * @param value - value
+   */
+  void put(byte[] key, byte[] value) throws IOException;
+
+  /**
+   * Deletes a Key from Metadata DB.
+   * @param key   - key
+   */
+  void delete(byte[] key) throws IOException;
+
+  /**
+   * Atomic write a batch of operations.
+   * @param batch
+   * @throws IOException
+   */
+  void writeBatch(BatchOperation batch) throws IOException;
+
+  /**
+   * Given a volume return the corresponding DB key.
+   * @param volume - Volume name
+   */
+  byte[] getVolumeKey(String volume);
+
+  /**
+   * Given a user return the corresponding DB key.
+   * @param user - User name
+   */
+  byte[] getUserKey(String user);
+
+  /**
+   * Given a volume and bucket, return the corresponding DB key.
+   * @param volume - User name
+   * @param bucket - Bucket name
+   */
+  byte[] getBucketKey(String volume, String bucket);
+
+  /**
+   * Given a volume, bucket and a key, return the corresponding DB key.
+   * @param volume - volume name
+   * @param bucket - bucket name
+   * @param key - key name
+   * @return bytes of DB key.
+   */
+  byte[] getDBKeyBytes(String volume, String bucket, String key);
+
+  /**
+   * Returns the DB key name of a deleted key in KSM metadata store.
+   * The name for a deleted key has prefix #deleting# followed by
+   * the actual key name.
+   * @param keyName - key name
+   * @return bytes of DB key.
+   */
+  byte[] getDeletedKeyName(byte[] keyName);
+
+  /**
+   * Returns the DB key name of an open key in the KSM metadata store.
+   * The name has the #open# prefix followed by the actual key name.
+   * @param keyName - key name
+   * @param id - the id for this open
+   * @return bytes of DB key.
+   */
+  byte[] getOpenKeyNameBytes(String keyName, int id);
+
+  /**
+   * Returns the full name of a key, given the volume name, bucket name and
+   * key name. The full name is built by joining the parts with DB delimiters.
+   *
+   * @param volumeName - volume name
+   * @param bucketName - bucket name
+   * @param keyName - key name
+   * @return the full key name.
+   */
+  String getKeyWithDBPrefix(String volumeName, String bucketName,
+      String keyName);
+
+  /**
+   * Given a volume, check if it is empty,
+   * i.e. there are no buckets inside it.
+   * @param volume - Volume name
+   */
+  boolean isVolumeEmpty(String volume) throws IOException;
+
+  /**
+   * Given a volume/bucket, check if it is empty,
+   * i.e. there are no keys inside it.
+   * @param volume - Volume name
+   * @param  bucket - Bucket name
+   * @return true if the bucket is empty
+   */
+  boolean isBucketEmpty(String volume, String bucket) throws IOException;
+
+  /**
+   * Returns a list of buckets represented by {@link KsmBucketInfo}
+   * in the given volume.
+   *
+   * @param volumeName
+   *   the name of the volume. This argument is required;
+   *   this method returns buckets in the given volume.
+   * @param startBucket
+   *   the start bucket name. Only the buckets whose name is
+   *   after this value will be included in the result.
+   *   This key is excluded from the result.
+   * @param bucketPrefix
+   *   bucket name prefix. Only the buckets whose name has
+   *   this prefix will be included in the result.
+   * @param maxNumOfBuckets
+   *   the maximum number of buckets to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of buckets.
+   * @throws IOException
+   */
+  List<KsmBucketInfo> listBuckets(String volumeName, String startBucket,
+      String bucketPrefix, int maxNumOfBuckets) throws IOException;
+
+  /**
+   * Returns a list of keys represented by {@link KsmKeyInfo}
+   * in the given bucket.
+   *
+   * @param volumeName
+   *   the name of the volume.
+   * @param bucketName
+   *   the name of the bucket.
+   * @param startKey
+   *   the start key name, only the keys whose name is
+   *   after this value will be included in the result.
+   *   This key is excluded from the result.
+   * @param keyPrefix
+   *   key name prefix, only the keys whose name has
+   *   this prefix will be included in the result.
+   * @param maxKeys
+   *   the maximum number of keys to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of keys.
+   * @throws IOException
+   */
+  List<KsmKeyInfo> listKeys(String volumeName,
+      String bucketName, String startKey, String keyPrefix, int maxKeys)
+      throws IOException;
+
+  /**
+   * Returns a list of volumes owned by a given user; if user is null,
+   * returns all volumes.
+   *
+   * @param userName
+   *   volume owner
+   * @param prefix
+   *   the volume prefix used to filter the listing result.
+   * @param startKey
+   *   the start volume name determines where to start listing from,
+   *   this key is excluded from the result.
+   * @param maxKeys
+   *   the maximum number of volumes to return.
+   * @return a list of {@link KsmVolumeArgs}
+   * @throws IOException
+   */
+  List<KsmVolumeArgs> listVolumes(String userName, String prefix,
+      String startKey, int maxKeys) throws IOException;
+
+  /**
+   * Returns a list of pending deletion key info, up to the given count.
+   * Each entry is a {@link BlockGroup}, which contains the info about the
+   * key name and all its associated block IDs. A pending deletion key is
+   * stored with #deleting# prefix in KSM DB.
+   *
+   * @param count max number of keys to return.
+   * @return a list of {@link BlockGroup} representing keys and blocks.
+   * @throws IOException
+   */
+  List<BlockGroup> getPendingDeletionKeys(int count) throws IOException;
+
+  /**
+   * Returns a list of info for all expired open keys, each containing the
+   * key name and all its associated block IDs. An open key is stored with
+   * the #open# prefix in the KSM DB.
+   *
+   * @return a list of {@link BlockGroup} representing keys and blocks.
+   * @throws IOException
+   */
+  List<BlockGroup> getExpiredOpenKeys() throws IOException;
+}
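
A sketch of how the key helpers and locks above are typically combined, mirroring the existence checks in BucketManagerImpl; the volume and bucket names are placeholders and the prefix constants live in OzoneConsts.

    // Illustrative existence check against the KSM metadata store.
    byte[] volumeKey = metadataManager.getVolumeKey("volume-a");
    byte[] bucketKey = metadataManager.getBucketKey("volume-a", "bucket-b");

    metadataManager.readLock().lock();
    try {
      boolean volumeExists = metadataManager.get(volumeKey) != null;
      boolean bucketExists = metadataManager.get(bucketKey) != null;
      boolean bucketEmpty = bucketExists
          && metadataManager.isBucketEmpty("volume-a", "bucket-b");
    } finally {
      metadataManager.readLock().unlock();
    }
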
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java
new file mode 100644
index 0000000..fa0eaa2
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java
@@ -0,0 +1,525 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeList;
+
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConsts.DELETING_KEY_PREFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_DB_NAME;
+import static org.apache.hadoop.ozone.OzoneConsts.OPEN_KEY_ID_DELIMINATOR;
+import static org.apache.hadoop.ozone.OzoneConsts.OPEN_KEY_PREFIX;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_DB_CACHE_SIZE_DEFAULT;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_DB_CACHE_SIZE_MB;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+
+/**
+ * KSM metadata manager implementation.
+ */
+public class KSMMetadataManagerImpl implements KSMMetadataManager {
+
+  private final MetadataStore store;
+  private final ReadWriteLock lock;
+  private final long openKeyExpireThresholdMS;
+
+  public KSMMetadataManagerImpl(OzoneConfiguration conf) throws IOException {
+    File metaDir = getOzoneMetaDirPath(conf);
+    final int cacheSize = conf.getInt(OZONE_KSM_DB_CACHE_SIZE_MB,
+        OZONE_KSM_DB_CACHE_SIZE_DEFAULT);
+    File ksmDBFile = new File(metaDir.getPath(), KSM_DB_NAME);
+    this.store = MetadataStoreBuilder.newBuilder()
+        .setConf(conf)
+        .setDbFile(ksmDBFile)
+        .setCacheSize(cacheSize * OzoneConsts.MB)
+        .build();
+    this.lock = new ReentrantReadWriteLock();
+    this.openKeyExpireThresholdMS = 1000 * conf.getInt(
+        OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS,
+        OZONE_OPEN_KEY_EXPIRE_THRESHOLD_SECONDS_DEFAULT);
+  }
+
+  /**
+   * Start metadata manager.
+   */
+  @Override
+  public void start() {
+
+  }
+
+  /**
+   * Stop metadata manager.
+   */
+  @Override
+  public void stop() throws IOException {
+    if (store != null) {
+      store.close();
+    }
+  }
+
+  /**
+   * Get metadata store.
+   * @return store - metadata store.
+   */
+  @VisibleForTesting
+  @Override
+  public MetadataStore getStore() {
+    return store;
+  }
+
+  /**
+   * Given a volume, return the corresponding DB key.
+   * @param volume - Volume name
+   * @return DB key bytes for the volume
+   */
+  public byte[] getVolumeKey(String volume) {
+    String dbVolumeName = OzoneConsts.KSM_VOLUME_PREFIX + volume;
+    return DFSUtil.string2Bytes(dbVolumeName);
+  }
+
+  /**
+   * Given a user, return the corresponding DB key.
+   * @param user - User name
+   * @return DB key bytes for the user
+   */
+  public byte[] getUserKey(String user) {
+    String dbUserName = OzoneConsts.KSM_USER_PREFIX + user;
+    return DFSUtil.string2Bytes(dbUserName);
+  }
+
+  /**
+   * Given a volume and bucket, return the corresponding DB key.
+   * @param volume - Volume name
+   * @param bucket - Bucket name
+   * @return DB key bytes for the bucket
+   */
+  public byte[] getBucketKey(String volume, String bucket) {
+    String bucketKeyString = OzoneConsts.KSM_VOLUME_PREFIX + volume
+        + OzoneConsts.KSM_BUCKET_PREFIX + bucket;
+    return DFSUtil.string2Bytes(bucketKeyString);
+  }
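+
+  // Editor's illustrative sketch (not part of the original patch): assuming
+  // the OzoneConsts prefixes resolve to "/" for volumes and buckets and "$"
+  // for users, the helpers above would produce DB keys roughly like
+  //
+  //   getVolumeKey("vol1")            -> "/vol1"
+  //   getUserKey("alice")             -> "$alice"
+  //   getBucketKey("vol1", "bucket1") -> "/vol1/bucket1"
+  //
+  // The concrete prefix characters come from OzoneConsts and are an
+  // assumption here; they are not shown in this diff.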
+
+  /**
+   * Builds the DB prefix used to scan the buckets of a volume.
+   * @param volume - Volume name
+   * @param bucket - Bucket name prefix; may be null or empty to match all
+   *                 buckets in the volume
+   * @return the DB key prefix string
+   */
+  private String getBucketWithDBPrefix(String volume, String bucket) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(OzoneConsts.KSM_VOLUME_PREFIX)
+        .append(volume)
+        .append(OzoneConsts.KSM_BUCKET_PREFIX);
+    if (!Strings.isNullOrEmpty(bucket)) {
+      sb.append(bucket);
+    }
+    return sb.toString();
+  }
+
+  @Override
+  public String getKeyWithDBPrefix(String volume, String bucket, String key) {
+    String keyVB = OzoneConsts.KSM_KEY_PREFIX + volume
+        + OzoneConsts.KSM_KEY_PREFIX + bucket
+        + OzoneConsts.KSM_KEY_PREFIX;
+    return Strings.isNullOrEmpty(key) ? keyVB : keyVB + key;
+  }
+
+  @Override
+  public byte[] getDBKeyBytes(String volume, String bucket, String key) {
+    return DFSUtil.string2Bytes(getKeyWithDBPrefix(volume, bucket, key));
+  }
+
+  @Override
+  public byte[] getDeletedKeyName(byte[] keyName) {
+    return DFSUtil.string2Bytes(
+        DELETING_KEY_PREFIX + DFSUtil.bytes2String(keyName));
+  }
+
+  @Override
+  public byte[] getOpenKeyNameBytes(String keyName, int id) {
+    return DFSUtil.string2Bytes(OPEN_KEY_PREFIX + id +
+        OPEN_KEY_ID_DELIMINATOR + keyName);
+  }
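+
+  // Editor's illustrative sketch (assumption, not part of the original
+  // patch): with DELETING_KEY_PREFIX, OPEN_KEY_PREFIX and
+  // OPEN_KEY_ID_DELIMINATOR as defined in OzoneConsts, the two helpers above
+  // would yield names of the form
+  //
+  //   getDeletedKeyName("/vol1/bucket1/key1")
+  //       -> "<deleting-prefix>/vol1/bucket1/key1"
+  //   getOpenKeyNameBytes("/vol1/bucket1/key1", 42)
+  //       -> "<open-prefix>42<delimiter>/vol1/bucket1/key1"
+  //
+  // The concrete prefix and delimiter strings are not shown in this diff.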
+
+  /**
+   * Returns the read lock used on Metadata DB.
+   * @return readLock
+   */
+  @Override
+  public Lock readLock() {
+    return lock.readLock();
+  }
+
+  /**
+   * Returns the write lock used on Metadata DB.
+   * @return writeLock
+   */
+  @Override
+  public Lock writeLock() {
+    return lock.writeLock();
+  }
+
+  /**
+   * Returns the value associated with this key.
+   * @param key - key
+   * @return value
+   */
+  @Override
+  public byte[] get(byte[] key) throws IOException {
+    return store.get(key);
+  }
+
+  /**
+   * Puts a Key into Metadata DB.
+   * @param key   - key
+   * @param value - value
+   */
+  @Override
+  public void put(byte[] key, byte[] value) throws IOException {
+    store.put(key, value);
+  }
+
+  /**
+   * Deletes a Key from Metadata DB.
+   * @param key   - key
+   */
+  public void delete(byte[] key) throws IOException {
+    store.delete(key);
+  }
+
+  @Override
+  public void writeBatch(BatchOperation batch) throws IOException {
+    this.store.writeBatch(batch);
+  }
+
+  /**
+   * Given a volume, check if it is empty, i.e. there are no buckets in it.
+   * @param volume - Volume name
+   * @return true if the volume is empty
+   */
+  public boolean isVolumeEmpty(String volume) throws IOException {
+    String dbVolumeRootName = OzoneConsts.KSM_VOLUME_PREFIX + volume
+        + OzoneConsts.KSM_BUCKET_PREFIX;
+    byte[] dbVolumeRootKey = DFSUtil.string2Bytes(dbVolumeRootName);
+    ImmutablePair<byte[], byte[]> volumeRoot =
+        store.peekAround(0, dbVolumeRootKey);
+    if (volumeRoot != null) {
+      return !DFSUtil.bytes2String(volumeRoot.getKey())
+          .startsWith(dbVolumeRootName);
+    }
+    return true;
+  }
+
+  /**
+   * Given a volume/bucket, check if it is empty,
+   * i.e. there are no keys in it.
+   * @param volume - Volume name
+   * @param bucket - Bucket name
+   * @return true if the bucket is empty
+   */
+  public boolean isBucketEmpty(String volume, String bucket)
+      throws IOException {
+    String keyRootName = getKeyWithDBPrefix(volume, bucket, null);
+    byte[] keyRoot = DFSUtil.string2Bytes(keyRootName);
+    ImmutablePair<byte[], byte[]> firstKey = store.peekAround(0, keyRoot);
+    if (firstKey != null) {
+      return !DFSUtil.bytes2String(firstKey.getKey())
+          .startsWith(keyRootName);
+    }
+    return true;
+  }
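+
+  // Editor's note (assumption about MetadataStore#peekAround semantics, which
+  // are not shown in this diff): both emptiness checks above rely on
+  // peekAround(0, key) returning the entry at or after the given key. If the
+  // returned entry's key does not start with the volume/bucket prefix, there
+  // is no child entry under that prefix, so the volume or bucket is empty.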
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<KsmBucketInfo> listBuckets(final String volumeName,
+      final String startBucket, final String bucketPrefix,
+      final int maxNumOfBuckets) throws IOException {
+    List<KsmBucketInfo> result = new ArrayList<>();
+    if (Strings.isNullOrEmpty(volumeName)) {
+      throw new KSMException("Volume name is required.",
+          ResultCodes.FAILED_VOLUME_NOT_FOUND);
+    }
+
+    byte[] volumeNameBytes = getVolumeKey(volumeName);
+    if (store.get(volumeNameBytes) == null) {
+      throw new KSMException("Volume " + volumeName + " not found.",
+          ResultCodes.FAILED_VOLUME_NOT_FOUND);
+    }
+
+
+    // A bucket starts with /#volume/#bucket_prefix
+    MetadataKeyFilter filter = (preKey, currentKey, nextKey) -> {
+      if (currentKey != null) {
+        String bucketNamePrefix =
+                getBucketWithDBPrefix(volumeName, bucketPrefix);
+        String bucket = DFSUtil.bytes2String(currentKey);
+        return bucket.startsWith(bucketNamePrefix);
+      }
+      return false;
+    };
+
+    List<Map.Entry<byte[], byte[]>> rangeResult;
+    if (!Strings.isNullOrEmpty(startBucket)) {
+      // Since we are excluding start key from the result,
+      // the maxNumOfBuckets is incremented.
+      rangeResult = store.getSequentialRangeKVs(
+          getBucketKey(volumeName, startBucket),
+          maxNumOfBuckets + 1, filter);
+      if (!rangeResult.isEmpty()) {
+        //Remove start key from result.
+        rangeResult.remove(0);
+      }
+    } else {
+      rangeResult = store.getSequentialRangeKVs(null, maxNumOfBuckets, filter);
+    }
+
+    for (Map.Entry<byte[], byte[]> entry : rangeResult) {
+      KsmBucketInfo info = KsmBucketInfo.getFromProtobuf(
+          BucketInfo.parseFrom(entry.getValue()));
+      result.add(info);
+    }
+    return result;
+  }
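+
+  // Editor's usage sketch (hypothetical caller, not part of the original
+  // patch): a caller can page through buckets by passing the last bucket of
+  // the previous page as startBucket; it is excluded from the next page,
+  // which is why the range scan above fetches maxNumOfBuckets + 1 entries and
+  // drops the first one. Roughly:
+  //
+  //   List<KsmBucketInfo> page1 = metadataManager.listBuckets("vol1", null, "", 100);
+  //   String last = page1.get(page1.size() - 1).getBucketName();
+  //   List<KsmBucketInfo> page2 = metadataManager.listBuckets("vol1", last, "", 100);
+  //
+  // KsmBucketInfo#getBucketName() is assumed here and not shown in this diff.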
+
+  @Override
+  public List<KsmKeyInfo> listKeys(String volumeName, String bucketName,
+      String startKey, String keyPrefix, int maxKeys) throws IOException {
+    List<KsmKeyInfo> result = new ArrayList<>();
+    if (Strings.isNullOrEmpty(volumeName)) {
+      throw new KSMException("Volume name is required.",
+          ResultCodes.FAILED_VOLUME_NOT_FOUND);
+    }
+
+    if (Strings.isNullOrEmpty(bucketName)) {
+      throw new KSMException("Bucket name is required.",
+          ResultCodes.FAILED_BUCKET_NOT_FOUND);
+    }
+
+    byte[] bucketNameBytes = getBucketKey(volumeName, bucketName);
+    if (store.get(bucketNameBytes) == null) {
+      throw new KSMException("Bucket " + bucketName + " not found.",
+          ResultCodes.FAILED_BUCKET_NOT_FOUND);
+    }
+
+    MetadataKeyFilter filter = new KeyPrefixFilter(
+                getKeyWithDBPrefix(volumeName, bucketName, keyPrefix));
+
+    List<Map.Entry<byte[], byte[]>> rangeResult;
+    if (!Strings.isNullOrEmpty(startKey)) {
+      // Since we are excluding the start key from the result,
+      // maxKeys is incremented by one.
+      rangeResult = store.getSequentialRangeKVs(
+          getDBKeyBytes(volumeName, bucketName, startKey),
+          maxKeys + 1, filter);
+      if (!rangeResult.isEmpty()) {
+        //Remove start key from result.
+        rangeResult.remove(0);
+      }
+    } else {
+      rangeResult = store.getSequentialRangeKVs(null, maxKeys, filter);
+    }
+
+    for (Map.Entry<byte[], byte[]> entry : rangeResult) {
+      KsmKeyInfo info = KsmKeyInfo.getFromProtobuf(
+          KeyInfo.parseFrom(entry.getValue()));
+      result.add(info);
+    }
+    return result;
+  }
+
+  @Override
+  public List<KsmVolumeArgs> listVolumes(String userName,
+      String prefix, String startKey, int maxKeys) throws IOException {
+    List<KsmVolumeArgs> result = Lists.newArrayList();
+    VolumeList volumes;
+    if (Strings.isNullOrEmpty(userName)) {
+      volumes = getAllVolumes();
+    } else {
+      volumes = getVolumesByUser(userName);
+    }
+
+    if (volumes == null || volumes.getVolumeNamesCount() == 0) {
+      return result;
+    }
+
+    boolean startKeyFound = Strings.isNullOrEmpty(startKey);
+    for (String volumeName : volumes.getVolumeNamesList()) {
+      if (!Strings.isNullOrEmpty(prefix)) {
+        if (!volumeName.startsWith(prefix)) {
+          continue;
+        }
+      }
+
+      if (!startKeyFound && volumeName.equals(startKey)) {
+        startKeyFound = true;
+        continue;
+      }
+      if (startKeyFound && result.size() < maxKeys) {
+        byte[] volumeInfo = store.get(this.getVolumeKey(volumeName));
+        if (volumeInfo == null) {
+          // Could not find volume info for the given volume name. Since the
+          // name was loaded from the DB, this probably means the KSM DB is
+          // corrupted or some entries were accidentally removed.
+          throw new KSMException("Volume info not found for " + volumeName,
+              ResultCodes.FAILED_VOLUME_NOT_FOUND);
+        }
+        VolumeInfo info = VolumeInfo.parseFrom(volumeInfo);
+        KsmVolumeArgs volumeArgs = KsmVolumeArgs.getFromProtobuf(info);
+        result.add(volumeArgs);
+      }
+    }
+
+    return result;
+  }
+
+  private VolumeList getVolumesByUser(String userName)
+      throws KSMException {
+    return getVolumesByUser(getUserKey(userName));
+  }
+
+  private VolumeList getVolumesByUser(byte[] userNameKey)
+      throws KSMException {
+    VolumeList volumes = null;
+    try {
+      byte[] volumesInBytes = store.get(userNameKey);
+      if (volumesInBytes == null) {
+        // No volume found for this user, return an empty list
+        return VolumeList.newBuilder().build();
+      }
+      volumes = VolumeList.parseFrom(volumesInBytes);
+    } catch (IOException e) {
+      throw new KSMException("Unable to get volumes info by the given user, "
+          + "metadata might be corrupted", e,
+          ResultCodes.FAILED_METADATA_ERROR);
+    }
+    return volumes;
+  }
+
+  private VolumeList getAllVolumes() throws IOException {
+    // Scan all users in database
+    KeyPrefixFilter filter = new KeyPrefixFilter(OzoneConsts.KSM_USER_PREFIX);
+    // We do not expect a huge number of users per cluster, so it should be
+    // fine to scan all users in the DB and collect the list of volume names
+    // per user.
+    List<Map.Entry<byte[], byte[]>> rangeKVs = store
+        .getSequentialRangeKVs(null, Integer.MAX_VALUE, filter);
+
+    VolumeList.Builder builder = VolumeList.newBuilder();
+    for (Map.Entry<byte[], byte[]> entry : rangeKVs) {
+      VolumeList volumes = this.getVolumesByUser(entry.getKey());
+      builder.addAllVolumeNames(volumes.getVolumeNamesList());
+    }
+
+    return builder.build();
+  }
+
+  @Override
+  public List<BlockGroup> getPendingDeletionKeys(final int count)
+      throws IOException {
+    List<BlockGroup> keyBlocksList = Lists.newArrayList();
+    List<Map.Entry<byte[], byte[]>> rangeResult =
+        store.getRangeKVs(null, count,
+            MetadataKeyFilters.getDeletingKeyFilter());
+    for (Map.Entry<byte[], byte[]> entry : rangeResult) {
+      KsmKeyInfo info =
+          KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(entry.getValue()));
+      // Get block keys as a list.
+      KsmKeyLocationInfoGroup latest = info.getLatestVersionLocations();
+      if (latest == null) {
+        return Collections.emptyList();
+      }
+      List<String> item = latest.getLocationList().stream()
+          .map(KsmKeyLocationInfo::getBlockID)
+          .collect(Collectors.toList());
+      BlockGroup keyBlocks = BlockGroup.newBuilder()
+          .setKeyName(DFSUtil.bytes2String(entry.getKey()))
+          .addAllBlockIDs(item)
+          .build();
+      keyBlocksList.add(keyBlocks);
+    }
+    return keyBlocksList;
+  }
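+
+  // Editor's note: the key names returned above are the raw DB key names and
+  // therefore still carry the deleting prefix; KeyManager#deletePendingDeletionKey
+  // is documented to expect that prefixed form.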
+
+  @Override
+  public List<BlockGroup> getExpiredOpenKeys() throws IOException {
+    List<BlockGroup> keyBlocksList = Lists.newArrayList();
+    long now = Time.now();
+    final MetadataKeyFilter openKeyFilter =
+        new KeyPrefixFilter(OPEN_KEY_PREFIX);
+    List<Map.Entry<byte[], byte[]>> rangeResult =
+        store.getSequentialRangeKVs(null, Integer.MAX_VALUE,
+            openKeyFilter);
+    for (Map.Entry<byte[], byte[]> entry : rangeResult) {
+      KsmKeyInfo info =
+          KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(entry.getValue()));
+      long lastModify = info.getModificationTime();
+      if (now - lastModify < this.openKeyExpireThresholdMS) {
+        // The key may still be active, not hanging; skip it.
+        continue;
+      }
+      // Get block keys as a list.
+      List<String> item = info.getLatestVersionLocations()
+          .getBlocksLatestVersionOnly().stream()
+          .map(KsmKeyLocationInfo::getBlockID)
+          .collect(Collectors.toList());
+      BlockGroup keyBlocks = BlockGroup.newBuilder()
+          .setKeyName(DFSUtil.bytes2String(entry.getKey()))
+          .addAllBlockIDs(item)
+          .build();
+      keyBlocksList.add(keyBlocks);
+    }
+    return keyBlocksList;
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetrics.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetrics.java
new file mode 100644
index 0000000..bd29012
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetrics.java
@@ -0,0 +1,437 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.annotations.VisibleForTesting;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+
+/**
+ * This class is for maintaining KeySpaceManager statistics.
+ */
+@InterfaceAudience.Private
+@Metrics(about="Key Space Manager Metrics", context="dfs")
+public class KSMMetrics {
+  private static final String SOURCE_NAME =
+      KSMMetrics.class.getSimpleName();
+
+  // KSM request type op metrics
+  private @Metric MutableCounterLong numVolumeOps;
+  private @Metric MutableCounterLong numBucketOps;
+  private @Metric MutableCounterLong numKeyOps;
+
+  // KSM op metrics
+  private @Metric MutableCounterLong numVolumeCreates;
+  private @Metric MutableCounterLong numVolumeUpdates;
+  private @Metric MutableCounterLong numVolumeInfos;
+  private @Metric MutableCounterLong numVolumeCheckAccesses;
+  private @Metric MutableCounterLong numBucketCreates;
+  private @Metric MutableCounterLong numVolumeDeletes;
+  private @Metric MutableCounterLong numBucketInfos;
+  private @Metric MutableCounterLong numBucketUpdates;
+  private @Metric MutableCounterLong numBucketDeletes;
+  private @Metric MutableCounterLong numKeyAllocate;
+  private @Metric MutableCounterLong numKeyLookup;
+  private @Metric MutableCounterLong numKeyDeletes;
+  private @Metric MutableCounterLong numBucketLists;
+  private @Metric MutableCounterLong numKeyLists;
+  private @Metric MutableCounterLong numVolumeLists;
+  private @Metric MutableCounterLong numKeyCommits;
+  private @Metric MutableCounterLong numAllocateBlockCalls;
+  private @Metric MutableCounterLong numGetServiceLists;
+
+  // Failure Metrics
+  private @Metric MutableCounterLong numVolumeCreateFails;
+  private @Metric MutableCounterLong numVolumeUpdateFails;
+  private @Metric MutableCounterLong numVolumeInfoFails;
+  private @Metric MutableCounterLong numVolumeDeleteFails;
+  private @Metric MutableCounterLong numBucketCreateFails;
+  private @Metric MutableCounterLong numVolumeCheckAccessFails;
+  private @Metric MutableCounterLong numBucketInfoFails;
+  private @Metric MutableCounterLong numBucketUpdateFails;
+  private @Metric MutableCounterLong numBucketDeleteFails;
+  private @Metric MutableCounterLong numKeyAllocateFails;
+  private @Metric MutableCounterLong numKeyLookupFails;
+  private @Metric MutableCounterLong numKeyDeleteFails;
+  private @Metric MutableCounterLong numBucketListFails;
+  private @Metric MutableCounterLong numKeyListFails;
+  private @Metric MutableCounterLong numVolumeListFails;
+  private @Metric MutableCounterLong numKeyCommitFails;
+  private @Metric MutableCounterLong numBlockAllocateCallFails;
+  private @Metric MutableCounterLong numGetServiceListFails;
+
+  public KSMMetrics() {
+  }
+
+  public static KSMMetrics create() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    return ms.register(SOURCE_NAME,
+        "Key Space Manager Metrics",
+        new KSMMetrics());
+  }
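+
+  // Editor's usage sketch (not part of the original patch): the typical
+  // lifecycle is to register the source once at KSM startup, bump counters
+  // from the request handlers, and unregister on shutdown, e.g.
+  //
+  //   KSMMetrics metrics = KSMMetrics.create();
+  //   metrics.incNumVolumeCreates();                 // on each create-volume call
+  //   long created = metrics.getNumVolumeCreates();  // visible-for-testing read
+  //   metrics.unRegister();                          // on shutdown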
+
+  public void incNumVolumeCreates() {
+    numVolumeOps.incr();
+    numVolumeCreates.incr();
+  }
+
+  public void incNumVolumeUpdates() {
+    numVolumeOps.incr();
+    numVolumeUpdates.incr();
+  }
+
+  public void incNumVolumeInfos() {
+    numVolumeOps.incr();
+    numVolumeInfos.incr();
+  }
+
+  public void incNumVolumeDeletes() {
+    numVolumeOps.incr();
+    numVolumeDeletes.incr();
+  }
+
+  public void incNumVolumeCheckAccesses() {
+    numVolumeOps.incr();
+    numVolumeCheckAccesses.incr();
+  }
+
+  public void incNumBucketCreates() {
+    numBucketOps.incr();
+    numBucketCreates.incr();
+  }
+
+  public void incNumBucketInfos() {
+    numBucketOps.incr();
+    numBucketInfos.incr();
+  }
+
+  public void incNumBucketUpdates() {
+    numBucketOps.incr();
+    numBucketUpdates.incr();
+  }
+
+  public void incNumBucketDeletes() {
+    numBucketOps.incr();
+    numBucketDeletes.incr();
+  }
+
+  public void incNumBucketLists() {
+    numBucketOps.incr();
+    numBucketLists.incr();
+  }
+
+  public void incNumKeyLists() {
+    numKeyOps.incr();
+    numKeyLists.incr();
+  }
+
+  public void incNumVolumeLists() {
+    numVolumeOps.incr();
+    numVolumeLists.incr();
+  }
+
+  public void incNumGetServiceLists() {
+    numGetServiceLists.incr();
+  }
+
+  public void incNumVolumeCreateFails() {
+    numVolumeCreateFails.incr();
+  }
+
+  public void incNumVolumeUpdateFails() {
+    numVolumeUpdateFails.incr();
+  }
+
+  public void incNumVolumeInfoFails() {
+    numVolumeInfoFails.incr();
+  }
+
+  public void incNumVolumeDeleteFails() {
+    numVolumeDeleteFails.incr();
+  }
+
+  public void incNumVolumeCheckAccessFails() {
+    numVolumeCheckAccessFails.incr();
+  }
+
+  public void incNumBucketCreateFails() {
+    numBucketCreateFails.incr();
+  }
+
+  public void incNumBucketInfoFails() {
+    numBucketInfoFails.incr();
+  }
+
+  public void incNumBucketUpdateFails() {
+    numBucketUpdateFails.incr();
+  }
+
+  public void incNumBucketDeleteFails() {
+    numBucketDeleteFails.incr();
+  }
+
+  public void incNumKeyAllocates() {
+    numKeyOps.incr();
+    numKeyAllocate.incr();
+  }
+
+  public void incNumKeyAllocateFails() {
+    numKeyAllocateFails.incr();
+  }
+
+  public void incNumKeyLookups() {
+    numKeyOps.incr();
+    numKeyLookup.incr();
+  }
+
+  public void incNumKeyLookupFails() {
+    numKeyLookupFails.incr();
+  }
+
+  public void incNumKeyDeleteFails() {
+    numKeyDeleteFails.incr();
+  }
+
+  public void incNumKeyDeletes() {
+    numKeyOps.incr();
+    numKeyDeletes.incr();
+  }
+
+  public void incNumKeyCommits() {
+    numKeyOps.incr();
+    numKeyCommits.incr();
+  }
+
+  public void incNumKeyCommitFails() {
+    numKeyCommitFails.incr();
+  }
+
+  public void incNumBlockAllocateCalls() {
+    numAllocateBlockCalls.incr();
+  }
+
+  public void incNumBlockAllocateCallFails() {
+    numBlockAllocateCallFails.incr();
+  }
+
+  public void incNumBucketListFails() {
+    numBucketListFails.incr();
+  }
+
+  public void incNumKeyListFails() {
+    numKeyListFails.incr();
+  }
+
+  public void incNumVolumeListFails() {
+    numVolumeListFails.incr();
+  }
+
+  public void incNumGetServiceListFails() {
+    numGetServiceListFails.incr();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeCreates() {
+    return numVolumeCreates.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeUpdates() {
+    return numVolumeUpdates.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeInfos() {
+    return numVolumeInfos.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeDeletes() {
+    return numVolumeDeletes.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeCheckAccesses() {
+    return numVolumeCheckAccesses.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketCreates() {
+    return numBucketCreates.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketInfos() {
+    return numBucketInfos.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketUpdates() {
+    return numBucketUpdates.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketDeletes() {
+    return numBucketDeletes.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketLists() {
+    return numBucketLists.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeLists() {
+    return numVolumeLists.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyLists() {
+    return numKeyLists.value();
+  }
+
+  @VisibleForTesting
+  public long getNumGetServiceLists() {
+    return numGetServiceLists.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeCreateFails() {
+    return numVolumeCreateFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeUpdateFails() {
+    return numVolumeUpdateFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeInfoFails() {
+    return numVolumeInfoFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeDeleteFails() {
+    return numVolumeDeleteFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeCheckAccessFails() {
+    return numVolumeCheckAccessFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketCreateFails() {
+    return numBucketCreateFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketInfoFails() {
+    return numBucketInfoFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketUpdateFails() {
+    return numBucketUpdateFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketDeleteFails() {
+    return numBucketDeleteFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyAllocates() {
+    return numKeyAllocate.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyAllocateFails() {
+    return numKeyAllocateFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyLookups() {
+    return numKeyLookup.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyLookupFails() {
+    return numKeyLookupFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyDeletes() {
+    return numKeyDeletes.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyDeletesFails() {
+    return numKeyDeleteFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBucketListFails() {
+    return numBucketListFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyListFails() {
+    return numKeyListFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumVolumeListFails() {
+    return numVolumeListFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyCommits() {
+    return numKeyCommits.value();
+  }
+
+  @VisibleForTesting
+  public long getNumKeyCommitFails() {
+    return numKeyCommitFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBlockAllocates() {
+    return numAllocateBlockCalls.value();
+  }
+
+  @VisibleForTesting
+  public long getNumBlockAllocateFails() {
+    return numBlockAllocateCallFails.value();
+  }
+
+  @VisibleForTesting
+  public long getNumGetServiceListFails() {
+    return numGetServiceListFails.value();
+  }
+
+  public void unRegister() {
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    ms.unregisterSource(SOURCE_NAME);
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java
new file mode 100644
index 0000000..015bed6
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KSMStorage.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.UUID;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.common.Storage;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+
+import static org.apache.hadoop.ozone.OzoneConsts.SCM_ID;
+import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath;
+
+/**
+ * KSMStorage is responsible for management of the StorageDirectories used by
+ * the KSM.
+ */
+public class KSMStorage extends Storage {
+
+  public static final String STORAGE_DIR = "ksm";
+  public static final String KSM_ID = "ksmUuid";
+
+  /**
+   * Construct KSMStorage.
+   * @throws IOException if any directories are inaccessible.
+   */
+  public KSMStorage(OzoneConfiguration conf) throws IOException {
+    super(NodeType.KSM, getOzoneMetaDirPath(conf), STORAGE_DIR);
+  }
+
+  public void setScmId(String scmId) throws IOException {
+    if (getState() == StorageState.INITIALIZED) {
+      throw new IOException("KSM is already initialized.");
+    } else {
+      getStorageInfo().setProperty(SCM_ID, scmId);
+    }
+  }
+
+  public void setKsmId(String ksmId) throws IOException {
+    if (getState() == StorageState.INITIALIZED) {
+      throw new IOException("KSM is already initialized.");
+    } else {
+      getStorageInfo().setProperty(KSM_ID, ksmId);
+    }
+  }
+
+  /**
+   * Retrieves the SCM ID from the version file.
+   * @return SCM_ID
+   */
+  public String getScmId() {
+    return getStorageInfo().getProperty(SCM_ID);
+  }
+
+  /**
+   * Retrieves the KSM ID from the version file.
+   * @return KSM_ID
+   */
+  public String getKsmId() {
+    return getStorageInfo().getProperty(KSM_ID);
+  }
+
+  @Override
+  protected Properties getNodeProperties() {
+    String ksmId = getKsmId();
+    if (ksmId == null) {
+      ksmId = UUID.randomUUID().toString();
+    }
+    Properties ksmProperties = new Properties();
+    ksmProperties.setProperty(KSM_ID, ksmId);
+    return ksmProperties;
+  }
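+
+  // Editor's usage sketch (assumption, not part of the original patch): an
+  // initialization flow would be expected to set both IDs before the storage
+  // state file is persisted, roughly:
+  //
+  //   KSMStorage ksmStore = new KSMStorage(conf);
+  //   if (ksmStore.getState() != StorageState.INITIALIZED) {
+  //     ksmStore.setScmId(scmId);
+  //     ksmStore.setKsmId(UUID.randomUUID().toString());
+  //     ksmStore.initialize();  // inherited from Storage; assumed here
+  //   }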
+}
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java
new file mode 100644
index 0000000..14fb69c
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyDeletingService.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.BackgroundTask;
+import org.apache.hadoop.utils.BackgroundTaskQueue;
+import org.apache.hadoop.utils.BackgroundTaskResult;
+import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KEY_DELETING_LIMIT_PER_TASK;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys.OZONE_KEY_DELETING_LIMIT_PER_TASK_DEFAULT;
+
+/**
+ * This is the background service to delete keys. It periodically scans the
+ * KSM metadata for keys with the "#deleting" prefix, asks SCM to delete the
+ * associated blocks and, once SCM reports success for a key, cleans that
+ * key up from the KSM DB.
+ */
+public class KeyDeletingService extends BackgroundService {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KeyDeletingService.class);
+
+  // The thread pool size for key deleting service.
+  private static final int KEY_DELETING_CORE_POOL_SIZE = 2;
+
+  private final ScmBlockLocationProtocol scmClient;
+  private final KeyManager manager;
+  private final int keyLimitPerTask;
+
+  public KeyDeletingService(ScmBlockLocationProtocol scmClient,
+      KeyManager manager, long serviceInterval,
+      long serviceTimeout, Configuration conf) {
+    super("KeyDeletingService", serviceInterval, TimeUnit.MILLISECONDS,
+        KEY_DELETING_CORE_POOL_SIZE, serviceTimeout);
+    this.scmClient = scmClient;
+    this.manager = manager;
+    this.keyLimitPerTask = conf.getInt(OZONE_KEY_DELETING_LIMIT_PER_TASK,
+        OZONE_KEY_DELETING_LIMIT_PER_TASK_DEFAULT);
+  }
+
+  @Override
+  public BackgroundTaskQueue getTasks() {
+    BackgroundTaskQueue queue = new BackgroundTaskQueue();
+    queue.add(new KeyDeletingTask());
+    return queue;
+  }
+
+  /**
+   * A key deleting task scans the KSM DB for a batch of pending-deletion
+   * keys and sends them, along with their associated blocks, to SCM for
+   * deletion. Once SCM confirms that the keys are deleted (i.e. SCM has
+   * persisted the block info in its deletedBlockLog), the task removes
+   * those keys from the KSM DB.
+   */
+  private class KeyDeletingTask implements
+      BackgroundTask<BackgroundTaskResult> {
+
+    @Override
+    public int getPriority() {
+      return 0;
+    }
+
+    @Override
+    public BackgroundTaskResult call() throws Exception {
+      try {
+        long startTime = Time.monotonicNow();
+        List<BlockGroup> keyBlocksList = manager
+            .getPendingDeletionKeys(keyLimitPerTask);
+        if (keyBlocksList.size() > 0) {
+          LOG.info("Found {} to-delete keys in KSM", keyBlocksList.size());
+          List<DeleteBlockGroupResult> results =
+              scmClient.deleteKeyBlocks(keyBlocksList);
+          for (DeleteBlockGroupResult result : results) {
+            if (result.isSuccess()) {
+              try {
+                // Purge key from KSM DB.
+                manager.deletePendingDeletionKey(result.getObjectKey());
+                LOG.debug("Key {} deleted from KSM DB", result.getObjectKey());
+              } catch (IOException e) {
+                // If deleting a pending-deletion key fails, log a warning and
+                // keep it in this state so that the deletion can be retried
+                // next time.
+                LOG.warn("Failed to delete pending-deletion key {}",
+                    result.getObjectKey(), e);
+              }
+            } else {
+              // Key deletion failed, retry in next interval.
+              LOG.warn("Key {} deletion failed because some of the blocks"
+                  + " were failed to delete, failed blocks: {}",
+                  result.getObjectKey(),
+                  String.join(",", result.getFailedBlocks()));
+            }
+          }
+
+          if (!results.isEmpty()) {
+            LOG.info("Number of key deleted from KSM DB: {},"
+                + " task elapsed time: {}ms",
+                results.size(), Time.monotonicNow() - startTime);
+          }
+
+          return results::size;
+        } else {
+          LOG.debug("No pending deletion key found in KSM");
+        }
+      } catch (IOException e) {
+        LOG.error("Unable to get pending deletion keys, retry in"
+            + " next interval", e);
+      }
+      return EmptyTaskResult.newResult();
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManager.java
new file mode 100644
index 0000000..e71ce5f
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManager.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Handles key level commands.
+ */
+public interface KeyManager {
+
+  /**
+   * Start key manager.
+   */
+  void start();
+
+  /**
+   * Stop key manager.
+   */
+  void stop() throws IOException;
+
+  /**
+   * After calling commit, the key will be made visible. There can be multiple
+   * open key writes in parallel (identified by client id). The most recently
+   * committed one will be the one visible.
+   *
+   * @param args the key to commit.
+   * @param clientID the client that is committing.
+   * @throws IOException
+   */
+  void commitKey(KsmKeyArgs args, int clientID) throws IOException;
+
+  /**
+   * A client calls this on an open key to request allocation of a new block,
+   * which is appended to the tail of the open key's current block list.
+   *
+   * @param args the open key to which the block is appended
+   * @param clientID the client requesting the block.
+   * @return the reference to the new block.
+   * @throws IOException
+   */
+  KsmKeyLocationInfo allocateBlock(KsmKeyArgs args, int clientID)
+      throws IOException;
+  /**
+   * Given the args of a key to put, write an open key entry to the metadata.
+   *
+   * If container creation or the key write fails in DistributedStorageHandler,
+   * this key's metadata will still remain in KSM.
+   * TODO garbage collect the open keys that never get closed
+   *
+   * @param args the args of the key provided by client.
+   * @return an OpenKeySession instance the client uses to talk to the container.
+   * @throws IOException
+   */
+  OpenKeySession openKey(KsmKeyArgs args) throws IOException;
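+
+  /*
+   * Editor's sketch of the intended client flow (drawn from the javadoc
+   * above; a non-normative example, not part of the original patch):
+   *
+   *   OpenKeySession session = keyManager.openKey(args);
+   *   KsmKeyLocationInfo extra = keyManager.allocateBlock(args, session.getId());
+   *   keyManager.commitKey(args, session.getId());
+   *   KsmKeyInfo info = keyManager.lookupKey(args);
+   *
+   * OpenKeySession#getId() is assumed here; only the interface methods in
+   * this file are defined by the patch.
+   */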
+
+  /**
+   * Looks up an existing key and returns its info to the client side, which
+   * DistributedStorageHandler uses to access the data on the datanodes.
+   *
+   * @param args the args of the key provided by client.
+   * @return a KsmKeyInfo instance the client uses to talk to the container.
+   * @throws IOException
+   */
+  KsmKeyInfo lookupKey(KsmKeyArgs args) throws IOException;
+
+  /**
+   * Deletes an object by an object key. The key will be immediately removed
+   * from the KSM namespace and become invisible to clients. The object data
+   * is removed asynchronously and may be retained for some time.
+   *
+   * @param args the args of the key provided by client.
+   * @throws IOException if specified key doesn't exist or
+   * some other I/O errors while deleting an object.
+   */
+  void deleteKey(KsmKeyArgs args) throws IOException;
+
+  /**
+   * Returns a list of keys represented by {@link KsmKeyInfo}
+   * in the given bucket.
+   *
+   * @param volumeName
+   *   the name of the volume.
+   * @param bucketName
+   *   the name of the bucket.
+   * @param startKey
+   *   the start key name, only the keys whose name is
+   *   after this value will be included in the result.
+   *   This key is excluded from the result.
+   * @param keyPrefix
+   *   key name prefix, only the keys whose name has
+   *   this prefix will be included in the result.
+   * @param maxKeys
+   *   the maximum number of keys to return. It ensures
+   *   the size of the result will not exceed this limit.
+   * @return a list of keys.
+   * @throws IOException
+   */
+  List<KsmKeyInfo> listKeys(String volumeName,
+      String bucketName, String startKey, String keyPrefix, int maxKeys)
+      throws IOException;
+
+  /**
+   * Returns a list of pending deletion keys, up to the given count.
+   * Each entry is a {@link BlockGroup}, which contains the info about the
+   * key name and all its associated block IDs. A pending deletion key is
+   * stored with the #deleting# prefix in the KSM DB.
+   *
+   * @param count max number of keys to return.
+   * @return a list of {@link BlockGroup} representing keys and blocks.
+   * @throws IOException
+   */
+  List<BlockGroup> getPendingDeletionKeys(int count) throws IOException;
+
+  /**
+   * Deletes a pending deletion key by its name. This is often called when the
+   * key can be safely deleted from this layer. Once called, all footprints
+   * of the key will be purged from the KSM DB.
+   *
+   * @param objectKeyName object key name with #deleting# prefix.
+   * @throws IOException if specified key doesn't exist or other I/O errors.
+   */
+  void deletePendingDeletionKey(String objectKeyName) throws IOException;
+
+  /**
+   * Returns a list of expired open keys. Each entry contains the info about
+   * the key name and all its associated block IDs. An open key is stored
+   * with the #open# prefix in the KSM DB.
+   *
+   * @return a list of {@link BlockGroup} representing keys and blocks.
+   * @throws IOException
+   */
+  List<BlockGroup> getExpiredOpenKeys() throws IOException;
+
+  /**
+   * Deletes an expired open key by its name. Called when a hanging key has
+   * been lingering for too long. Once called, the open key entry is removed
+   * from the KSM metadata.
+   *
+   * @param objectKeyName object key name with #open# prefix.
+   * @throws IOException if specified key doesn't exist or other I/O errors.
+   */
+  void deleteExpiredOpenKey(String objectKeyName) throws IOException;
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java
new file mode 100644
index 0000000..70ba178
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeyManagerImpl.java
@@ -0,0 +1,512 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfoGroup;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.KeyInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.BatchOperation;
+import org.iq80.leveldb.DBException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_KEY_PREALLOCATION_MAXSIZE;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_KEY_PREALLOCATION_MAXSIZE_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys
+    .OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS_DEFAULT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_DEFAULT;
+import static org.apache.hadoop.ozone
+    .OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_IN_MB;
+import org.apache.hadoop.hdds.protocol
+    .proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol
+    .proto.HddsProtos.ReplicationFactor;
+
+
+/**
+ * Implementation of {@link KeyManager}.
+ */
+public class KeyManagerImpl implements KeyManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KeyManagerImpl.class);
+
+  /**
+   * An SCM block client, used to talk to SCM to allocate blocks during putKey.
+   */
+  private final ScmBlockLocationProtocol scmBlockClient;
+  private final KSMMetadataManager metadataManager;
+  private final long scmBlockSize;
+  private final boolean useRatis;
+  private final BackgroundService keyDeletingService;
+  private final BackgroundService openKeyCleanupService;
+
+  private final long preallocateMax;
+  private final Random random;
+  private final String ksmId;
+
+  public KeyManagerImpl(ScmBlockLocationProtocol scmBlockClient,
+      KSMMetadataManager metadataManager, OzoneConfiguration conf,
+      String ksmId) {
+    this.scmBlockClient = scmBlockClient;
+    this.metadataManager = metadataManager;
+    this.scmBlockSize = conf.getLong(OZONE_SCM_BLOCK_SIZE_IN_MB,
+        OZONE_SCM_BLOCK_SIZE_DEFAULT) * OzoneConsts.MB;
+    this.useRatis = conf.getBoolean(DFS_CONTAINER_RATIS_ENABLED_KEY,
+        DFS_CONTAINER_RATIS_ENABLED_DEFAULT);
+    long  blockDeleteInterval = conf.getTimeDuration(
+        OZONE_BLOCK_DELETING_SERVICE_INTERVAL,
+        OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT,
+        TimeUnit.MILLISECONDS);
+    long serviceTimeout = conf.getTimeDuration(
+        OZONE_BLOCK_DELETING_SERVICE_TIMEOUT,
+        OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT,
+        TimeUnit.MILLISECONDS);
+    this.preallocateMax = conf.getLong(
+        OZONE_KEY_PREALLOCATION_MAXSIZE,
+        OZONE_KEY_PREALLOCATION_MAXSIZE_DEFAULT);
+    keyDeletingService = new KeyDeletingService(
+        scmBlockClient, this, blockDeleteInterval, serviceTimeout, conf);
+    int openkeyCheckInterval = conf.getInt(
+        OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS,
+        OZONE_OPEN_KEY_CLEANUP_SERVICE_INTERVAL_SECONDS_DEFAULT);
+    openKeyCleanupService = new OpenKeyCleanupService(
+        scmBlockClient, this, openkeyCheckInterval, serviceTimeout);
+    random = new Random();
+    this.ksmId = ksmId;
+  }
+
+  @VisibleForTesting
+  public BackgroundService getOpenKeyCleanupService() {
+    return openKeyCleanupService;
+  }
+
+  @Override
+  public void start() {
+    keyDeletingService.start();
+    openKeyCleanupService.start();
+  }
+
+  @Override
+  public void stop() throws IOException {
+    keyDeletingService.shutdown();
+    openKeyCleanupService.shutdown();
+  }
+
+  private void validateBucket(String volumeName, String bucketName)
+      throws IOException {
+    byte[] volumeKey = metadataManager.getVolumeKey(volumeName);
+    byte[] bucketKey = metadataManager.getBucketKey(volumeName, bucketName);
+
+    //Check if the volume exists
+    if(metadataManager.get(volumeKey) == null) {
+      LOG.error("volume not found: {}", volumeName);
+      throw new KSMException("Volume not found",
+          KSMException.ResultCodes.FAILED_VOLUME_NOT_FOUND);
+    }
+    //Check if the bucket exists
+    if(metadataManager.get(bucketKey) == null) {
+      LOG.error("bucket not found: {}/{} ", volumeName, bucketName);
+      throw new KSMException("Bucket not found",
+          KSMException.ResultCodes.FAILED_BUCKET_NOT_FOUND);
+    }
+  }
+
+  @Override
+  public KsmKeyLocationInfo allocateBlock(KsmKeyArgs args, int clientID)
+      throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    String keyName = args.getKeyName();
+    ReplicationFactor factor = args.getFactor();
+    ReplicationType type = args.getType();
+
+    // If user does not specify a replication strategy or
+    // replication factor, KSM will use defaults.
+    if(factor == null) {
+      factor = useRatis ? ReplicationFactor.THREE: ReplicationFactor.ONE;
+    }
+
+    if(type == null) {
+      type = useRatis ? ReplicationType.RATIS : ReplicationType.STAND_ALONE;
+    }
+
+    try {
+      validateBucket(volumeName, bucketName);
+      String objectKey = metadataManager.getKeyWithDBPrefix(
+          volumeName, bucketName, keyName);
+      byte[] openKey = metadataManager.getOpenKeyNameBytes(objectKey, clientID);
+      byte[] keyData = metadataManager.get(openKey);
+      if (keyData == null) {
+        LOG.error("Allocate block for a key not in open status in meta store " +
+            objectKey + " with ID " + clientID);
+        throw new KSMException("Open Key not found",
+            KSMException.ResultCodes.FAILED_KEY_NOT_FOUND);
+      }
+      AllocatedBlock allocatedBlock =
+          scmBlockClient.allocateBlock(scmBlockSize, type, factor, ksmId);
+      KsmKeyInfo keyInfo =
+          KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(keyData));
+      KsmKeyLocationInfo info = new KsmKeyLocationInfo.Builder()
+          .setContainerName(allocatedBlock.getPipeline().getContainerName())
+          .setBlockID(allocatedBlock.getKey())
+          .setShouldCreateContainer(allocatedBlock.getCreateContainer())
+          .setLength(scmBlockSize)
+          .setOffset(0)
+          .build();
+      // current version not committed, so new blocks coming now are added to
+      // the same version
+      keyInfo.appendNewBlocks(Collections.singletonList(info));
+      keyInfo.updateModifcationTime();
+      metadataManager.put(openKey, keyInfo.getProtobuf().toByteArray());
+      return info;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public OpenKeySession openKey(KsmKeyArgs args) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    String keyName = args.getKeyName();
+    ReplicationFactor factor = args.getFactor();
+    ReplicationType type = args.getType();
+
+    // If user does not specify a replication strategy or
+    // replication factor, KSM will use defaults.
+    if(factor == null) {
+      factor = useRatis ? ReplicationFactor.THREE: ReplicationFactor.ONE;
+    }
+
+    if(type == null) {
+      type = useRatis ? ReplicationType.RATIS : ReplicationType.STAND_ALONE;
+    }
+
+    try {
+      validateBucket(volumeName, bucketName);
+      long requestedSize = Math.min(preallocateMax, args.getDataSize());
+      List<KsmKeyLocationInfo> locations = new ArrayList<>();
+      String objectKey = metadataManager.getKeyWithDBPrefix(
+          volumeName, bucketName, keyName);
+      // The requested size is not required, it is more of an optimization:
+      // SCM looks at the requested size; if it is 0, no block is allocated at
+      // this point and the client can always call allocateBlock later. If the
+      // requested size is not 0, KSM preallocates some blocks and piggybacks
+      // them to the client, to save RPC calls.
+      while (requestedSize > 0) {
+        long allocateSize = Math.min(scmBlockSize, requestedSize);
+        AllocatedBlock allocatedBlock =
+            scmBlockClient.allocateBlock(allocateSize, type, factor, ksmId);
+        KsmKeyLocationInfo subKeyInfo = new KsmKeyLocationInfo.Builder()
+            .setContainerName(allocatedBlock.getPipeline().getContainerName())
+            .setBlockID(allocatedBlock.getKey())
+            .setShouldCreateContainer(allocatedBlock.getCreateContainer())
+            .setLength(allocateSize)
+            .setOffset(0)
+            .build();
+        locations.add(subKeyInfo);
+        requestedSize -= allocateSize;
+      }
+      // NOTE: the size of a key is not a hard limit on anything; it is the
+      // value the client should expect as the current size of the key. If the
+      // client sets a value, that value is used; otherwise we allocate a
+      // single block, whose size becomes the current size seen by the client.
+      long size = args.getDataSize() >= 0 ? args.getDataSize() : scmBlockSize;
+      byte[] keyKey = metadataManager.getDBKeyBytes(
+          volumeName, bucketName, keyName);
+      byte[] value = metadataManager.get(keyKey);
+      KsmKeyInfo keyInfo;
+      long openVersion;
+      if (value != null) {
+        // the key already exists; the new blocks will be added as a new version
+        keyInfo = KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(value));
+        // when locations.size() == 0, the new version will have the same
+        // blocks as its previous version
+        openVersion = keyInfo.addNewVersion(locations);
+        keyInfo.setDataSize(size + keyInfo.getDataSize());
+      } else {
+        // the key does not exist; create a new object whose blocks form
+        // version 0
+        long currentTime = Time.now();
+        keyInfo = new KsmKeyInfo.Builder()
+            .setVolumeName(args.getVolumeName())
+            .setBucketName(args.getBucketName())
+            .setKeyName(args.getKeyName())
+            .setKsmKeyLocationInfos(Collections.singletonList(
+                new KsmKeyLocationInfoGroup(0, locations)))
+            .setCreationTime(currentTime)
+            .setModificationTime(currentTime)
+            .setDataSize(size)
+            .build();
+        openVersion = 0;
+      }
+      // Generate a random ID which is not already in meta db.
+      int id = -1;
+      // In general this should succeed within a few attempts; the arbitrarily
+      // large bound here avoids an infinite loop.
+      for (int j = 0; j < 10000; j++) {
+        id = random.nextInt();
+        byte[] openKey = metadataManager.getOpenKeyNameBytes(objectKey, id);
+        if (metadataManager.get(openKey) == null) {
+          metadataManager.put(openKey, keyInfo.getProtobuf().toByteArray());
+          break;
+        }
+      }
+      if (id == -1) {
+        throw new IOException("Failed to find a usable id for " + objectKey);
+      }
+      LOG.debug("Key {} allocated in volume {} bucket {}",
+          keyName, volumeName, bucketName);
+      return new OpenKeySession(id, keyInfo, openVersion);
+    } catch (KSMException e) {
+      throw e;
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Key open failed for volume:{} bucket:{} key:{}",
+            volumeName, bucketName, keyName, ex);
+      }
+      throw new KSMException(ex.getMessage(),
+          KSMException.ResultCodes.FAILED_KEY_ALLOCATION);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
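+
+  // Illustrative sketch (assuming OpenKeySession#getId is the session-id
+  // accessor): a typical open/commit sequence against this manager would
+  // look roughly like
+  //
+  //   OpenKeySession session = keyManager.openKey(args);
+  //   // ... client writes data into the allocated blocks ...
+  //   keyManager.commitKey(args, session.getId());
+  //
+  // The allocation loop above splits the requested size into blocks of at
+  // most scmBlockSize; e.g. a 600 MB request with a (hypothetical) 256 MB
+  // block size yields blocks of 256 MB, 256 MB and 88 MB.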
+
+  @Override
+  public void commitKey(KsmKeyArgs args, int clientID) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    String keyName = args.getKeyName();
+    try {
+      validateBucket(volumeName, bucketName);
+      String objectKey = metadataManager.getKeyWithDBPrefix(
+          volumeName, bucketName, keyName);
+      byte[] objectKeyBytes = metadataManager.getDBKeyBytes(volumeName,
+          bucketName, keyName);
+      byte[] openKey = metadataManager.getOpenKeyNameBytes(objectKey, clientID);
+      byte[] openKeyData = metadataManager.get(openKey);
+      if (openKeyData == null) {
+        throw new KSMException("Commit a key without corresponding entry " +
+            DFSUtil.bytes2String(openKey), ResultCodes.FAILED_KEY_NOT_FOUND);
+      }
+      KsmKeyInfo keyInfo =
+          KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(openKeyData));
+      keyInfo.setDataSize(args.getDataSize());
+      keyInfo.setModificationTime(Time.now());
+      BatchOperation batch = new BatchOperation();
+      batch.delete(openKey);
+      batch.put(objectKeyBytes, keyInfo.getProtobuf().toByteArray());
+      metadataManager.writeBatch(batch);
+    } catch (KSMException e) {
+      throw e;
+    } catch (IOException ex) {
+      LOG.error("Key commit failed for volume:{} bucket:{} key:{}",
+          volumeName, bucketName, keyName, ex);
+      throw new KSMException(ex.getMessage(),
+          KSMException.ResultCodes.FAILED_KEY_ALLOCATION);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public KsmKeyInfo lookupKey(KsmKeyArgs args) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    String keyName = args.getKeyName();
+    try {
+      byte[] keyKey = metadataManager.getDBKeyBytes(
+          volumeName, bucketName, keyName);
+      byte[] value = metadataManager.get(keyKey);
+      if (value == null) {
+        LOG.debug("volume:{} bucket:{} Key:{} not found",
+            volumeName, bucketName, keyName);
+        throw new KSMException("Key not found",
+            KSMException.ResultCodes.FAILED_KEY_NOT_FOUND);
+      }
+      return KsmKeyInfo.getFromProtobuf(KeyInfo.parseFrom(value));
+    } catch (DBException ex) {
+      LOG.error("Get key failed for volume:{} bucket:{} key:{}",
+          volumeName, bucketName, keyName, ex);
+      throw new KSMException(ex.getMessage(),
+          KSMException.ResultCodes.FAILED_KEY_NOT_FOUND);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public void deleteKey(KsmKeyArgs args) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    String volumeName = args.getVolumeName();
+    String bucketName = args.getBucketName();
+    String keyName = args.getKeyName();
+    try {
+      byte[] objectKey = metadataManager.getDBKeyBytes(
+          volumeName, bucketName, keyName);
+      byte[] objectValue = metadataManager.get(objectKey);
+      if (objectValue == null) {
+        throw new KSMException("Key not found",
+            KSMException.ResultCodes.FAILED_KEY_NOT_FOUND);
+      }
+      byte[] deletingKey = metadataManager.getDeletedKeyName(objectKey);
+      BatchOperation batch = new BatchOperation();
+      batch.put(deletingKey, objectValue);
+      batch.delete(objectKey);
+      metadataManager.writeBatch(batch);
+    } catch (DBException ex) {
+      LOG.error(String.format("Delete key failed for volume:%s "
+          + "bucket:%s key:%s", volumeName, bucketName, keyName), ex);
+      throw new KSMException(ex.getMessage(), ex,
+          ResultCodes.FAILED_KEY_DELETION);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public List<KsmKeyInfo> listKeys(String volumeName, String bucketName,
+      String startKey, String keyPrefix, int maxKeys) throws IOException {
+    Preconditions.checkNotNull(volumeName);
+    Preconditions.checkNotNull(bucketName);
+
+    metadataManager.readLock().lock();
+    try {
+      return metadataManager.listKeys(volumeName, bucketName,
+          startKey, keyPrefix, maxKeys);
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  @Override
+  public List<BlockGroup> getPendingDeletionKeys(final int count)
+      throws IOException {
+    metadataManager.readLock().lock();
+    try {
+      return metadataManager.getPendingDeletionKeys(count);
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  @Override
+  public void deletePendingDeletionKey(String objectKeyName)
+      throws IOException{
+    Preconditions.checkNotNull(objectKeyName);
+    if (!objectKeyName.startsWith(OzoneConsts.DELETING_KEY_PREFIX)) {
+      throw new IllegalArgumentException("Invalid key name,"
+          + " the name should be the key name with deleting prefix");
+    }
+
+    // Simply removes the entry from KSM DB.
+    metadataManager.writeLock().lock();
+    try {
+      byte[] pendingDelKey = DFSUtil.string2Bytes(objectKeyName);
+      byte[] delKeyValue = metadataManager.get(pendingDelKey);
+      if (delKeyValue == null) {
+        throw new IOException("Failed to delete key " + objectKeyName
+            + " because it is not found in DB");
+      }
+      metadataManager.delete(pendingDelKey);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
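+
+  // Illustrative note: deleteKey above only moves the entry under a
+  // deleting-prefixed name; the actual cleanup is expected to happen out of
+  // band (analogous to OpenKeyCleanupService), roughly:
+  //
+  //   List<BlockGroup> pending = keyManager.getPendingDeletionKeys(limit);
+  //   // ask SCM to delete the blocks, then for each successful group:
+  //   keyManager.deletePendingDeletionKey(groupKeyName);
+  //
+  // where groupKeyName is assumed to be the deleting-prefixed key name
+  // stored in the KSM DB.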
+
+  @Override
+  public List<BlockGroup> getExpiredOpenKeys() throws IOException {
+    metadataManager.readLock().lock();
+    try {
+      return metadataManager.getExpiredOpenKeys();
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  @Override
+  public void deleteExpiredOpenKey(String objectKeyName) throws IOException {
+    Preconditions.checkNotNull(objectKeyName);
+    if (!objectKeyName.startsWith(OzoneConsts.OPEN_KEY_PREFIX)) {
+      throw new IllegalArgumentException("Invalid key name,"
+          + " the name should be the key name with open key prefix");
+    }
+
+    // Simply removes the entry from KSM DB.
+    metadataManager.writeLock().lock();
+    try {
+      byte[] openKey = DFSUtil.string2Bytes(objectKeyName);
+      byte[] delKeyValue = metadataManager.get(openKey);
+      if (delKeyValue == null) {
+        throw new IOException("Failed to delete key " + objectKeyName
+            + " because it is not found in DB");
+      }
+      metadataManager.delete(openKey);
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
new file mode 100644
index 0000000..76312e7
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
@@ -0,0 +1,901 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.BlockingService;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ipc.Client;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl;
+import org.apache.hadoop.ozone.common.Storage.StorageState;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.ksm.protocol.KeySpaceManagerProtocol;
+import org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolPB;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException.ResultCodes;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos
+    .ServicePort;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.ozone.protocolPB
+    .KeySpaceManagerProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .ScmBlockLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.ScmBlockLocationProtocolPB;
+import org.apache.hadoop.hdds.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.StringUtils;
+
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForBlockClients;
+import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
+import static org.apache.hadoop.hdds.HddsUtils.isHddsEnabled;
+import static org.apache.hadoop.ozone.KsmUtils.getKsmAddress;
+import static org.apache.hadoop.hdds.server.ServerUtils
+    .updateRPCListenAddress;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.management.ObjectName;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_ADDRESS_KEY;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_HANDLER_COUNT_DEFAULT;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_HANDLER_COUNT_KEY;
+import static org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.KeySpaceManagerService
+    .newReflectiveBlockingService;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos
+    .NodeState.HEALTHY;
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
+/**
+ * The Ozone KeySpace Manager (KSM) is the metadata manager of Ozone.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "CBLOCK", "OZONE", "HBASE"})
+public final class KeySpaceManager extends ServiceRuntimeInfoImpl
+    implements KeySpaceManagerProtocol, KSMMXBean {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KeySpaceManager.class);
+
+  private static final String USAGE =
+      "Usage: \n ozone ksm [genericOptions] " + "[ "
+          + StartupOption.CREATEOBJECTSTORE.getName() + " ]\n " + "ozone ksm [ "
+          + StartupOption.HELP.getName() + " ]\n";
+
+  /** Startup options. */
+  public enum StartupOption {
+    CREATEOBJECTSTORE("-createObjectStore"),
+    HELP("-help"),
+    REGULAR("-regular");
+
+    private final String name;
+
+    StartupOption(String arg) {
+      this.name = arg;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public static StartupOption parse(String value) {
+      for (StartupOption option : StartupOption.values()) {
+        if (option.name.equalsIgnoreCase(value)) {
+          return option;
+        }
+      }
+      return null;
+    }
+  }
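+
+  // For illustration, the startup options above correspond to invocations
+  // such as:
+  //   ozone ksm                      -> REGULAR (normal startup)
+  //   ozone ksm -createObjectStore   -> CREATEOBJECTSTORE (initialize KSM storage)
+  //   ozone ksm -help                -> HELP (print usage and exit)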
+
+  private final OzoneConfiguration configuration;
+  private final RPC.Server ksmRpcServer;
+  private final InetSocketAddress ksmRpcAddress;
+  private final KSMMetadataManager metadataManager;
+  private final VolumeManager volumeManager;
+  private final BucketManager bucketManager;
+  private final KeyManager keyManager;
+  private final KSMMetrics metrics;
+  private final KeySpaceManagerHttpServer httpServer;
+  private final KSMStorage ksmStorage;
+  private final ScmBlockLocationProtocol scmBlockClient;
+  private final StorageContainerLocationProtocol scmContainerClient;
+  private ObjectName ksmInfoBeanName;
+
+  private KeySpaceManager(OzoneConfiguration conf) throws IOException {
+    Preconditions.checkNotNull(conf);
+    configuration = conf;
+    ksmStorage = new KSMStorage(conf);
+    scmBlockClient = getScmBlockClient(configuration);
+    scmContainerClient = getScmContainerClient(configuration);
+    if (ksmStorage.getState() != StorageState.INITIALIZED) {
+      throw new KSMException("KSM not initialized.",
+          ResultCodes.KSM_NOT_INITIALIZED);
+    }
+
+    // verifies that the SCM info in the KSM Version file is correct.
+    ScmInfo scmInfo = scmBlockClient.getScmInfo();
+    if (!(scmInfo.getClusterId().equals(ksmStorage.getClusterID()) && scmInfo
+        .getScmId().equals(ksmStorage.getScmId()))) {
+      throw new KSMException("SCM version info mismatch.",
+          ResultCodes.SCM_VERSION_MISMATCH_ERROR);
+    }
+    final int handlerCount = conf.getInt(OZONE_KSM_HANDLER_COUNT_KEY,
+        OZONE_KSM_HANDLER_COUNT_DEFAULT);
+
+    RPC.setProtocolEngine(configuration, KeySpaceManagerProtocolPB.class,
+        ProtobufRpcEngine.class);
+
+    BlockingService ksmService = newReflectiveBlockingService(
+        new KeySpaceManagerProtocolServerSideTranslatorPB(this));
+    final InetSocketAddress ksmNodeRpcAddr =
+        getKsmAddress(configuration);
+    ksmRpcServer = startRpcServer(configuration, ksmNodeRpcAddr,
+        KeySpaceManagerProtocolPB.class, ksmService,
+        handlerCount);
+    ksmRpcAddress = updateRPCListenAddress(configuration,
+        OZONE_KSM_ADDRESS_KEY, ksmNodeRpcAddr, ksmRpcServer);
+    metadataManager = new KSMMetadataManagerImpl(configuration);
+    volumeManager = new VolumeManagerImpl(metadataManager, configuration);
+    bucketManager = new BucketManagerImpl(metadataManager);
+    metrics = KSMMetrics.create();
+    keyManager =
+        new KeyManagerImpl(scmBlockClient, metadataManager, configuration,
+            ksmStorage.getKsmId());
+    httpServer = new KeySpaceManagerHttpServer(configuration, this);
+  }
+
+  /**
+   * Creates an SCM block client, used by putKey() and getKey().
+   *
+   * @return {@link ScmBlockLocationProtocol}
+   * @throws IOException
+   */
+  private static ScmBlockLocationProtocol getScmBlockClient(
+      OzoneConfiguration conf) throws IOException {
+    RPC.setProtocolEngine(conf, ScmBlockLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    long scmVersion =
+        RPC.getProtocolVersion(ScmBlockLocationProtocolPB.class);
+    InetSocketAddress scmBlockAddress =
+        getScmAddressForBlockClients(conf);
+    ScmBlockLocationProtocolClientSideTranslatorPB scmBlockLocationClient =
+        new ScmBlockLocationProtocolClientSideTranslatorPB(
+            RPC.getProxy(ScmBlockLocationProtocolPB.class, scmVersion,
+                scmBlockAddress, UserGroupInformation.getCurrentUser(), conf,
+                NetUtils.getDefaultSocketFactory(conf),
+                Client.getRpcTimeout(conf)));
+    return scmBlockLocationClient;
+  }
+
+  /**
+   * Returns an SCM container client.
+   *
+   * @return {@link StorageContainerLocationProtocol}
+   * @throws IOException
+   */
+  private static StorageContainerLocationProtocol getScmContainerClient(
+      OzoneConfiguration conf) throws IOException {
+    RPC.setProtocolEngine(conf, StorageContainerLocationProtocolPB.class,
+        ProtobufRpcEngine.class);
+    long scmVersion =
+        RPC.getProtocolVersion(StorageContainerLocationProtocolPB.class);
+    InetSocketAddress scmAddr = getScmAddressForClients(
+        conf);
+    StorageContainerLocationProtocolClientSideTranslatorPB scmContainerClient =
+        new StorageContainerLocationProtocolClientSideTranslatorPB(
+            RPC.getProxy(StorageContainerLocationProtocolPB.class, scmVersion,
+                scmAddr, UserGroupInformation.getCurrentUser(), conf,
+                NetUtils.getDefaultSocketFactory(conf),
+                Client.getRpcTimeout(conf)));
+    return scmContainerClient;
+  }
+
+  @VisibleForTesting
+  public KeyManager getKeyManager() {
+    return keyManager;
+  }
+
+  @VisibleForTesting
+  public ScmInfo getScmInfo() throws IOException {
+    return scmBlockClient.getScmInfo();
+  }
+
+  @VisibleForTesting
+  public KSMStorage getKsmStorage() {
+    return ksmStorage;
+  }
+  /**
+   * Starts an RPC server, if configured.
+   *
+   * @param conf configuration
+   * @param addr configured address of RPC server
+   * @param protocol RPC protocol provided by RPC server
+   * @param instance RPC protocol implementation instance
+   * @param handlerCount RPC server handler count
+   *
+   * @return RPC server
+   * @throws IOException if there is an I/O error while creating RPC server
+   */
+  private static RPC.Server startRpcServer(OzoneConfiguration conf,
+      InetSocketAddress addr, Class<?> protocol, BlockingService instance,
+      int handlerCount) throws IOException {
+    RPC.Server rpcServer = new RPC.Builder(conf)
+        .setProtocol(protocol)
+        .setInstance(instance)
+        .setBindAddress(addr.getHostString())
+        .setPort(addr.getPort())
+        .setNumHandlers(handlerCount)
+        .setVerbose(false)
+        .setSecretManager(null)
+        .build();
+
+    DFSUtil.addPBProtocol(conf, protocol, instance, rpcServer);
+    return rpcServer;
+  }
+
+  /**
+   * Get metadata manager.
+   * @return metadata manager.
+   */
+  public KSMMetadataManager getMetadataManager() {
+    return metadataManager;
+  }
+
+  public KSMMetrics getMetrics() {
+    return metrics;
+  }
+
+  /**
+   * Main entry point for starting KeySpaceManager.
+   *
+   * @param argv arguments
+   * @throws IOException if startup fails due to I/O error
+   */
+  public static void main(String[] argv) throws IOException {
+    if (DFSUtil.parseHelpArgument(argv, USAGE, System.out, true)) {
+      System.exit(0);
+    }
+    try {
+      OzoneConfiguration conf = new OzoneConfiguration();
+      GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
+      if (!hParser.isParseSuccessful()) {
+        System.err.println("USAGE: " + USAGE + " \n");
+        hParser.printGenericCommandUsage(System.err);
+        System.exit(1);
+      }
+      StringUtils.startupShutdownMessage(KeySpaceManager.class, argv, LOG);
+      KeySpaceManager ksm = createKSM(hParser.getRemainingArgs(), conf);
+      if (ksm != null) {
+        ksm.start();
+        ksm.join();
+      }
+    } catch (Throwable t) {
+      LOG.error("Failed to start the KeyspaceManager.", t);
+      terminate(1, t);
+    }
+  }
+
+  private static void printUsage(PrintStream out) {
+    out.println(USAGE + "\n");
+  }
+
+  /**
+   * Constructs KSM instance based on command line arguments.
+   * @param argv Command line arguments
+   * @param conf OzoneConfiguration
+   * @return KSM instance
+   * @throws IOException in case KSM instance creation fails.
+   */
+
+  public static KeySpaceManager createKSM(String[] argv,
+      OzoneConfiguration conf) throws IOException {
+    if (!isHddsEnabled(conf)) {
+      System.err.println("KSM cannot be started in secure mode or when " +
+          OZONE_ENABLED + " is set to false");
+      System.exit(1);
+    }
+    StartupOption startOpt = parseArguments(argv);
+    if (startOpt == null) {
+      printUsage(System.err);
+      terminate(1);
+      return null;
+    }
+    switch (startOpt) {
+    case CREATEOBJECTSTORE:
+      terminate(ksmInit(conf) ? 0 : 1);
+      return null;
+    case HELP:
+      printUsage(System.err);
+      terminate(0);
+      return null;
+    default:
+      return new KeySpaceManager(conf);
+    }
+  }
+
+  /**
+   * Initializes the KSM instance.
+   * @param conf OzoneConfiguration
+   * @return true if KSM initialization succeeds, false otherwise
+   * @throws IOException in case ozone metadata directory path is not accessible
+   */
+
+  private static boolean ksmInit(OzoneConfiguration conf) throws IOException {
+    KSMStorage ksmStorage = new KSMStorage(conf);
+    StorageState state = ksmStorage.getState();
+    if (state != StorageState.INITIALIZED) {
+      try {
+        ScmBlockLocationProtocol scmBlockClient = getScmBlockClient(conf);
+        ScmInfo scmInfo = scmBlockClient.getScmInfo();
+        String clusterId = scmInfo.getClusterId();
+        String scmId = scmInfo.getScmId();
+        if (clusterId == null || clusterId.isEmpty()) {
+          throw new IOException("Invalid Cluster ID");
+        }
+        if (scmId == null || scmId.isEmpty()) {
+          throw new IOException("Invalid SCM ID");
+        }
+        ksmStorage.setClusterId(clusterId);
+        ksmStorage.setScmId(scmId);
+        ksmStorage.initialize();
+        System.out.println(
+            "KSM initialization succeeded.Current cluster id for sd="
+                + ksmStorage.getStorageDir() + ";cid=" + ksmStorage
+                .getClusterID());
+        return true;
+      } catch (IOException ioe) {
+        LOG.error("Could not initialize KSM version file", ioe);
+        return false;
+      }
+    } else {
+      System.out.println(
+          "KSM already initialized.Reusing existing cluster id for sd="
+              + ksmStorage.getStorageDir() + ";cid=" + ksmStorage
+              .getClusterID());
+      return true;
+    }
+  }
+
+  /**
+   * Parses the command line options for KSM initialization.
+   * @param args command line arguments
+   * @return StartupOption if options are valid, null otherwise
+   */
+  private static StartupOption parseArguments(String[] args) {
+    if (args == null || args.length == 0) {
+      return StartupOption.REGULAR;
+    } else if (args.length == 1) {
+      return StartupOption.parse(args[0]);
+    }
+    return null;
+  }
+
+  /**
+   * Builds a message for logging startup information about an RPC server.
+   *
+   * @param description RPC server description
+   * @param addr RPC server listening address
+   * @return server startup message
+   */
+  private static String buildRpcServerStartMessage(String description,
+      InetSocketAddress addr) {
+    return addr != null ? String.format("%s is listening at %s",
+        description, addr.toString()) :
+        String.format("%s not started", description);
+  }
+
+  /**
+   * Start service.
+   */
+  public void start() throws IOException {
+    LOG.info(buildRpcServerStartMessage("KeyspaceManager RPC server",
+        ksmRpcAddress));
+    DefaultMetricsSystem.initialize("KeySpaceManager");
+    metadataManager.start();
+    keyManager.start();
+    ksmRpcServer.start();
+    httpServer.start();
+    registerMXBean();
+    setStartTime();
+  }
+
+  /**
+   * Stop service.
+   */
+  public void stop() {
+    try {
+      metadataManager.stop();
+      ksmRpcServer.stop();
+      keyManager.stop();
+      httpServer.stop();
+      metrics.unRegister();
+      unregisterMXBean();
+    } catch (Exception e) {
+      LOG.error("Key Space Manager stop failed.", e);
+    }
+  }
+
+  /**
+   * Wait until service has completed shutdown.
+   */
+  public void join() {
+    try {
+      ksmRpcServer.join();
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      LOG.info("Interrupted during KeyspaceManager join.", e);
+    }
+  }
+
+  /**
+   * Creates a volume.
+   *
+   * @param args - Arguments to create Volume.
+   * @throws IOException
+   */
+  @Override
+  public void createVolume(KsmVolumeArgs args) throws IOException {
+    try {
+      metrics.incNumVolumeCreates();
+      volumeManager.createVolume(args);
+    } catch (Exception ex) {
+      metrics.incNumVolumeCreateFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Changes the owner of a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param owner - Name of the owner.
+   * @throws IOException
+   */
+  @Override
+  public void setOwner(String volume, String owner) throws IOException {
+    try {
+      metrics.incNumVolumeUpdates();
+      volumeManager.setOwner(volume, owner);
+    } catch (Exception ex) {
+      metrics.incNumVolumeUpdateFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Changes the Quota on a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param quota - Quota in bytes.
+   * @throws IOException
+   */
+  @Override
+  public void setQuota(String volume, long quota) throws IOException {
+    try {
+      metrics.incNumVolumeUpdates();
+      volumeManager.setQuota(volume, quota);
+    } catch (Exception ex) {
+      metrics.incNumVolumeUpdateFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Checks if the specified user can access this volume.
+   *
+   * @param volume - volume
+   * @param userAcl - user acls which needs to be checked for access
+   * @return true if the user has required access for the volume,
+   *         false otherwise
+   * @throws IOException
+   */
+  @Override
+  public boolean checkVolumeAccess(String volume, OzoneAclInfo userAcl)
+      throws IOException {
+    try {
+      metrics.incNumVolumeCheckAccesses();
+      return volumeManager.checkVolumeAccess(volume, userAcl);
+    } catch (Exception ex) {
+      metrics.incNumVolumeCheckAccessFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Gets the volume information.
+   *
+   * @param volume - Volume name.
+   * @return KsmVolumeArgs of the volume; an exception is thrown on error.
+   * @throws IOException
+   */
+  @Override
+  public KsmVolumeArgs getVolumeInfo(String volume) throws IOException {
+    try {
+      metrics.incNumVolumeInfos();
+      return volumeManager.getVolumeInfo(volume);
+    } catch (Exception ex) {
+      metrics.incNumVolumeInfoFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Deletes an existing empty volume.
+   *
+   * @param volume - Name of the volume.
+   * @throws IOException
+   */
+  @Override
+  public void deleteVolume(String volume) throws IOException {
+    try {
+      metrics.incNumVolumeDeletes();
+      volumeManager.deleteVolume(volume);
+    } catch (Exception ex) {
+      metrics.incNumVolumeDeleteFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Lists volumes owned by a specific user.
+   *
+   * @param userName - user name
+   * @param prefix - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- the listing starts from the entry
+   * after prevKey.
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  @Override
+  public List<KsmVolumeArgs> listVolumeByUser(String userName, String prefix,
+      String prevKey, int maxKeys) throws IOException {
+    try {
+      metrics.incNumVolumeLists();
+      return volumeManager.listVolumes(userName, prefix, prevKey, maxKeys);
+    } catch (Exception ex) {
+      metrics.incNumVolumeListFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Lists all volumes in the cluster.
+   *
+   * @param prefix - Filter prefix -- Return only entries that match this.
+   * @param prevKey - Previous key -- the listing starts from the entry
+   * after prevKey.
+   * @param maxKeys - Max number of keys to return.
+   * @return List of Volumes.
+   * @throws IOException
+   */
+  @Override
+  public List<KsmVolumeArgs> listAllVolumes(String prefix, String prevKey, int
+      maxKeys) throws IOException {
+    try {
+      metrics.incNumVolumeLists();
+      return volumeManager.listVolumes(null, prefix, prevKey, maxKeys);
+    } catch (Exception ex) {
+      metrics.incNumVolumeListFails();
+      throw ex;
+    }
+  }
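+
+  // Illustrative sketch of paginated listing (user and prefix names are
+  // hypothetical):
+  //
+  //   List<KsmVolumeArgs> page = ksm.listVolumeByUser("alice", "vol", null, 100);
+  //   if (!page.isEmpty()) {
+  //     String last = page.get(page.size() - 1).getVolume();
+  //     // the next page starts after the last volume returned
+  //     page = ksm.listVolumeByUser("alice", "vol", last, 100);
+  //   }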
+
+  /**
+   * Creates a bucket.
+   *
+   * @param bucketInfo - BucketInfo to create bucket.
+   * @throws IOException
+   */
+  @Override
+  public void createBucket(KsmBucketInfo bucketInfo) throws IOException {
+    try {
+      metrics.incNumBucketCreates();
+      bucketManager.createBucket(bucketInfo);
+    } catch (Exception ex) {
+      metrics.incNumBucketCreateFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<KsmBucketInfo> listBuckets(String volumeName,
+      String startKey, String prefix, int maxNumOfBuckets)
+      throws IOException {
+    try {
+      metrics.incNumBucketLists();
+      return bucketManager.listBuckets(volumeName,
+          startKey, prefix, maxNumOfBuckets);
+    } catch (IOException ex) {
+      metrics.incNumBucketListFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Gets the bucket information.
+   *
+   * @param volume - Volume name.
+   * @param bucket - Bucket name.
+   * @return KsmBucketInfo of the bucket; an exception is thrown on error.
+   * @throws IOException
+   */
+  @Override
+  public KsmBucketInfo getBucketInfo(String volume, String bucket)
+      throws IOException {
+    try {
+      metrics.incNumBucketInfos();
+      return bucketManager.getBucketInfo(volume, bucket);
+    } catch (Exception ex) {
+      metrics.incNumBucketInfoFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Allocate a key.
+   *
+   * @param args - attributes of the key.
+   * @return KsmKeyInfo - the info about the allocated key.
+   * @throws IOException
+   */
+  @Override
+  public OpenKeySession openKey(KsmKeyArgs args) throws IOException {
+    try {
+      metrics.incNumKeyAllocates();
+      return keyManager.openKey(args);
+    } catch (Exception ex) {
+      metrics.incNumKeyAllocateFails();
+      throw ex;
+    }
+  }
+
+  @Override
+  public void commitKey(KsmKeyArgs args, int clientID)
+      throws IOException {
+    try {
+      metrics.incNumKeyCommits();
+      keyManager.commitKey(args, clientID);
+    } catch (Exception ex) {
+      metrics.incNumKeyCommitFails();
+      throw ex;
+    }
+  }
+
+  @Override
+  public KsmKeyLocationInfo allocateBlock(KsmKeyArgs args, int clientID)
+      throws IOException {
+    try {
+      metrics.incNumBlockAllocateCalls();
+      return keyManager.allocateBlock(args, clientID);
+    } catch (Exception ex) {
+      metrics.incNumBlockAllocateCallFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Lookup a key.
+   *
+   * @param args - attributes of the key.
+   * @return KsmKeyInfo - the info about the requested key.
+   * @throws IOException
+   */
+  @Override
+  public KsmKeyInfo lookupKey(KsmKeyArgs args) throws IOException {
+    try {
+      metrics.incNumKeyLookups();
+      return keyManager.lookupKey(args);
+    } catch (Exception ex) {
+      metrics.incNumKeyLookupFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Deletes an existing key.
+   *
+   * @param args - attributes of the key.
+   * @throws IOException
+   */
+  @Override
+  public void deleteKey(KsmKeyArgs args) throws IOException {
+    try {
+      metrics.incNumKeyDeletes();
+      keyManager.deleteKey(args);
+    } catch (Exception ex) {
+      metrics.incNumKeyDeleteFails();
+      throw ex;
+    }
+  }
+
+  @Override
+  public List<KsmKeyInfo> listKeys(String volumeName, String bucketName,
+      String startKey, String keyPrefix, int maxKeys) throws IOException {
+    try {
+      metrics.incNumKeyLists();
+      return keyManager.listKeys(volumeName, bucketName,
+          startKey, keyPrefix, maxKeys);
+    } catch (IOException ex) {
+      metrics.incNumKeyListFails();
+      throw ex;
+    }
+  }
+
+  /**
+   * Sets bucket property from args.
+   * @param args - BucketArgs.
+   * @throws IOException
+   */
+  @Override
+  public void setBucketProperty(KsmBucketArgs args)
+      throws IOException {
+    try {
+      metrics.incNumBucketUpdates();
+      bucketManager.setBucketProperty(args);
+    } catch (Exception ex) {
+      metrics.incNumBucketUpdateFails();
+      throw ex;
+    }
+  }
+
+
+  /**
+   * Deletes an existing empty bucket from volume.
+   * @param volume - Name of the volume.
+   * @param bucket - Name of the bucket.
+   * @throws IOException
+   */
+  public void deleteBucket(String volume, String bucket) throws IOException {
+    try {
+      metrics.incNumBucketDeletes();
+      bucketManager.deleteBucket(volume, bucket);
+    } catch (Exception ex) {
+      metrics.incNumBucketDeleteFails();
+      throw ex;
+    }
+  }
+
+  private void registerMXBean() {
+    Map<String, String> jmxProperties = new HashMap<>();
+    jmxProperties.put("component", "ServerRuntime");
+    this.ksmInfoBeanName =
+        MBeans.register("KeySpaceManager",
+            "KeySpaceManagerInfo",
+            jmxProperties,
+            this);
+  }
+
+  private void unregisterMXBean() {
+    if (this.ksmInfoBeanName != null) {
+      MBeans.unregister(this.ksmInfoBeanName);
+      this.ksmInfoBeanName = null;
+    }
+  }
+
+  @Override
+  public String getRpcPort() {
+    return "" + ksmRpcAddress.getPort();
+  }
+
+  @VisibleForTesting
+  public KeySpaceManagerHttpServer getHttpServer() {
+    return httpServer;
+  }
+
+  @Override
+  public List<ServiceInfo> getServiceList() throws IOException {
+    // When we implement multi-homing, this call has to be handled properly.
+    List<ServiceInfo> services = new ArrayList<>();
+    ServiceInfo.Builder ksmServiceInfoBuilder = ServiceInfo.newBuilder()
+        .setNodeType(HddsProtos.NodeType.KSM)
+        .setHostname(ksmRpcAddress.getHostName())
+        .addServicePort(ServicePort.newBuilder()
+                .setType(ServicePort.Type.RPC)
+                .setValue(ksmRpcAddress.getPort())
+            .build());
+    if (httpServer.getHttpAddress() != null) {
+      ksmServiceInfoBuilder.addServicePort(ServicePort.newBuilder()
+          .setType(ServicePort.Type.HTTP)
+          .setValue(httpServer.getHttpAddress().getPort())
+          .build());
+    }
+    if (httpServer.getHttpsAddress() != null) {
+      ksmServiceInfoBuilder.addServicePort(ServicePort.newBuilder()
+          .setType(ServicePort.Type.HTTPS)
+          .setValue(httpServer.getHttpsAddress().getPort())
+          .build());
+    }
+    services.add(ksmServiceInfoBuilder.build());
+
+    // For clients we have to return the SCM container protocol port,
+    // not the block protocol port.
+    InetSocketAddress scmAddr = getScmAddressForClients(
+        configuration);
+    ServiceInfo.Builder scmServiceInfoBuilder = ServiceInfo.newBuilder()
+        .setNodeType(HddsProtos.NodeType.SCM)
+        .setHostname(scmAddr.getHostName())
+        .addServicePort(ServicePort.newBuilder()
+            .setType(ServicePort.Type.RPC)
+            .setValue(scmAddr.getPort()).build());
+    services.add(scmServiceInfoBuilder.build());
+
+    List<HddsProtos.Node> nodes = scmContainerClient.queryNode(
+        EnumSet.of(HEALTHY), HddsProtos.QueryScope.CLUSTER, "")
+        .getNodesList();
+
+    for (HddsProtos.Node node : nodes) {
+      HddsProtos.DatanodeDetailsProto datanode = node.getNodeID();
+
+      ServiceInfo.Builder dnServiceInfoBuilder = ServiceInfo.newBuilder()
+          .setNodeType(HddsProtos.NodeType.DATANODE)
+          .setHostname(datanode.getHostName());
+
+      dnServiceInfoBuilder.addServicePort(ServicePort.newBuilder()
+          .setType(ServicePort.Type.HTTP)
+          .setValue(datanode.getOzoneRestPort())
+          .build());
+
+      services.add(dnServiceInfoBuilder.build());
+    }
+
+    metrics.incNumGetServiceLists();
+    // For now there is no exception that can happen in this call, so the
+    // failure metric is not handled. If we ever need to handle exceptions
+    // in this method, we should also incorporate
+    // metrics.incNumGetServiceListFails().
+    return services;
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java
new file mode 100644
index 0000000..478804b
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManagerHttpServer.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.hdds.server.BaseHttpServer;
+
+import java.io.IOException;
+
+/**
+ * HttpServer wrapper for the KeySpaceManager.
+ */
+public class KeySpaceManagerHttpServer extends BaseHttpServer {
+
+  public KeySpaceManagerHttpServer(Configuration conf, KeySpaceManager ksm)
+      throws IOException {
+    super(conf, "ksm");
+    addServlet("serviceList", "/serviceList", ServiceListJSONServlet.class);
+    getWebAppContext().setAttribute(OzoneConsts.KSM_CONTEXT_ATTRIBUTE, ksm);
+  }
+
+  @Override protected String getHttpAddressKey() {
+    return KSMConfigKeys.OZONE_KSM_HTTP_ADDRESS_KEY;
+  }
+
+  @Override protected String getHttpBindHostKey() {
+    return KSMConfigKeys.OZONE_KSM_HTTP_BIND_HOST_KEY;
+  }
+
+  @Override protected String getHttpsAddressKey() {
+    return KSMConfigKeys.OZONE_KSM_HTTPS_ADDRESS_KEY;
+  }
+
+  @Override protected String getHttpsBindHostKey() {
+    return KSMConfigKeys.OZONE_KSM_HTTPS_BIND_HOST_KEY;
+  }
+
+  @Override protected String getBindHostDefault() {
+    return KSMConfigKeys.OZONE_KSM_HTTP_BIND_HOST_DEFAULT;
+  }
+
+  @Override protected int getHttpBindPortDefault() {
+    return KSMConfigKeys.OZONE_KSM_HTTP_BIND_PORT_DEFAULT;
+  }
+
+  @Override protected int getHttpsBindPortDefault() {
+    return KSMConfigKeys.OZONE_KSM_HTTPS_BIND_PORT_DEFAULT;
+  }
+
+  @Override protected String getKeytabFile() {
+    return KSMConfigKeys.OZONE_KSM_KEYTAB_FILE;
+  }
+
+  @Override protected String getSpnegoPrincipal() {
+    return OzoneConfigKeys.OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL;
+  }
+
+  @Override protected String getEnabledKey() {
+    return KSMConfigKeys.OZONE_KSM_HTTP_ENABLED_KEY;
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java
new file mode 100644
index 0000000..7a2d7cc
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/OpenKeyCleanupService.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.utils.BackgroundService;
+import org.apache.hadoop.utils.BackgroundTask;
+import org.apache.hadoop.utils.BackgroundTaskQueue;
+import org.apache.hadoop.utils.BackgroundTaskResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Background service that deletes hanging open keys.
+ * It periodically scans the KSM metadata for keys with the "#open#" prefix,
+ * asks SCM to delete the corresponding blocks and, for every key SCM reports
+ * as successfully deleted, cleans up the open-key entry in the KSM DB.
+ */
+public class OpenKeyCleanupService extends BackgroundService {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OpenKeyCleanupService.class);
+
+  private final static int OPEN_KEY_DELETING_CORE_POOL_SIZE = 2;
+
+  private final KeyManager keyManager;
+  private final ScmBlockLocationProtocol scmClient;
+
+  public OpenKeyCleanupService(ScmBlockLocationProtocol scmClient,
+      KeyManager keyManager, int serviceInterval,
+      long serviceTimeout) {
+    super("OpenKeyCleanupService", serviceInterval, TimeUnit.SECONDS,
+        OPEN_KEY_DELETING_CORE_POOL_SIZE, serviceTimeout);
+    this.keyManager = keyManager;
+    this.scmClient = scmClient;
+  }
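+
+  // Illustrative wiring sketch (the interval/timeout values are hypothetical;
+  // the interval is interpreted in seconds by the super constructor above):
+  //
+  //   OpenKeyCleanupService cleanup =
+  //       new OpenKeyCleanupService(scmBlockClient, keyManager,
+  //           300 /* every 5 minutes */, 300000 /* service timeout */);
+  //   cleanup.start();  // assuming BackgroundService exposes a start() method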
+
+  @Override
+  public BackgroundTaskQueue getTasks() {
+    BackgroundTaskQueue queue = new BackgroundTaskQueue();
+    queue.add(new OpenKeyDeletingTask());
+    return queue;
+  }
+
+  private class OpenKeyDeletingTask
+      implements BackgroundTask<BackgroundTaskResult> {
+
+    @Override
+    public int getPriority() {
+      return 0;
+    }
+
+    @Override
+    public BackgroundTaskResult call() throws Exception {
+      try {
+        List<BlockGroup> keyBlocksList = keyManager.getExpiredOpenKeys();
+        if (keyBlocksList.size() > 0) {
+          int toDeleteSize = keyBlocksList.size();
+          LOG.debug("Found {} to-delete open keys in KSM", toDeleteSize);
+          List<DeleteBlockGroupResult> results =
+              scmClient.deleteKeyBlocks(keyBlocksList);
+          int deletedSize = 0;
+          for (DeleteBlockGroupResult result : results) {
+            if (result.isSuccess()) {
+              try {
+                keyManager.deleteExpiredOpenKey(result.getObjectKey());
+                LOG.debug("Key {} deleted from KSM DB", result.getObjectKey());
+                deletedSize += 1;
+              } catch (IOException e) {
+                LOG.warn("Failed to delete hanging-open key {}",
+                    result.getObjectKey(), e);
+              }
+            } else {
+              LOG.warn("Deleting open Key {} failed because some of the blocks"
+                      + " were failed to delete, failed blocks: {}",
+                  result.getObjectKey(),
+                  String.join(",", result.getFailedBlocks()));
+            }
+          }
+          LOG.info("Found {} expired open key entries, successfully " +
+              "cleaned up {} entries", toDeleteSize, deletedSize);
+          return results::size;
+        } else {
+          LOG.debug("No hanging open key fond in KSM");
+        }
+      } catch (IOException e) {
+        LOG.error("Unable to get hanging open keys, retry in"
+            + " next interval", e);
+      }
+      return BackgroundTaskResult.EmptyTaskResult.newResult();
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/ServiceListJSONServlet.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/ServiceListJSONServlet.java
new file mode 100644
index 0000000..34a80ce
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/ServiceListJSONServlet.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+
+/**
+ * Provides REST access to Ozone Service List.
+ * <p>
+ * This servlet generally will be placed under the /serviceList URL of
+ * KeySpaceManager HttpServer.
+ *
+ * The response format is JSON, in the form
+ * <p>
+ *  <code><pre>
+ *  {
+ *    "services" : [
+ *      {
+ *        "NodeType":"KSM",
+ *        "Hostname" "$hostname",
+ *        "ports" : {
+ *          "$PortType" : "$port",
+ *          ...
+ *        }
+ *      }
+ *    ]
+ *  }
+ *  </pre></code>
+ *  <p>
+ *
+ */
+public class ServiceListJSONServlet extends HttpServlet {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ServiceListJSONServlet.class);
+  private static final long serialVersionUID = 1L;
+
+  private KeySpaceManager ksm;
+
+  public void init() throws ServletException {
+    this.ksm = (KeySpaceManager) getServletContext()
+        .getAttribute(OzoneConsts.KSM_CONTEXT_ATTRIBUTE);
+  }
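+
+  // For illustration: the service list can be fetched with a plain HTTP GET
+  // against the KSM web UI address (host and port are hypothetical), e.g.
+  //
+  //   curl http://ksm-host:9874/serviceList
+  //
+  // which returns the JSON document described in the class javadoc above.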
+
+  /**
+   * Process a GET request for the specified resource.
+   *
+   * @param request
+   *          The servlet request we are processing
+   * @param response
+   *          The servlet response we are creating
+   */
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response) {
+    try {
+      ObjectMapper objectMapper = new ObjectMapper();
+      objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
+      response.setContentType("application/json; charset=utf8");
+      try (PrintWriter writer = response.getWriter()) {
+        writer.write(objectMapper.writeValueAsString(ksm.getServiceList()));
+      }
+    } catch (IOException e) {
+      LOG.error(
+          "Caught an exception while processing ServiceList request", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManager.java
new file mode 100644
index 0000000..6ac78d6
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManager.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * KSM volume manager interface.
+ */
+public interface VolumeManager {
+
+  /**
+   * Create a new volume.
+   * @param args - Volume args to create a volume
+   */
+  void createVolume(KsmVolumeArgs args) throws IOException;
+
+  /**
+   * Changes the owner of a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param owner - Name of the owner.
+   * @throws IOException
+   */
+  void setOwner(String volume, String owner) throws IOException;
+
+  /**
+   * Changes the Quota on a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param quota - Quota in bytes.
+   * @throws IOException
+   */
+  void setQuota(String volume, long quota) throws IOException;
+
+  /**
+   * Gets the volume information.
+   * @param volume - Volume name.
+   * @return KsmVolumeArgs of the volume; an exception is thrown on error.
+   * @throws IOException
+   */
+  KsmVolumeArgs getVolumeInfo(String volume) throws IOException;
+
+  /**
+   * Deletes an existing empty volume.
+   *
+   * @param volume - Name of the volume.
+   * @throws IOException
+   */
+  void deleteVolume(String volume) throws IOException;
+
+  /**
+   * Checks if the specified user can access this volume.
+   *
+   * @param volume - volume
+   * @param userAcl - user acl which needs to be checked for access
+   * @return true if the user has access for the volume, false otherwise
+   * @throws IOException
+   */
+  boolean checkVolumeAccess(String volume, OzoneAclInfo userAcl)
+      throws IOException;
+
+  /**
+   * Returns a list of volumes owned by a given user; if user is null,
+   * returns all volumes.
+   *
+   * @param userName
+   *   volume owner
+   * @param prefix
+   *   the volume prefix used to filter the listing result.
+   * @param startKey
+   *   the start volume name determines where to start listing from,
+   *   this key is excluded from the result.
+   * @param maxKeys
+   *   the maximum number of volumes to return.
+   * @return a list of {@link KsmVolumeArgs}
+   * @throws IOException
+   */
+  List<KsmVolumeArgs> listVolumes(String userName, String prefix,
+      String startKey, int maxKeys) throws IOException;
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java
new file mode 100644
index 0000000..cc2f78a
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/VolumeManagerImpl.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.VolumeList;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.VolumeInfo;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.util.Time;
+import org.apache.hadoop.utils.BatchOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_USER_MAX_VOLUME_DEFAULT;
+import static org.apache.hadoop.ozone.ksm.KSMConfigKeys
+    .OZONE_KSM_USER_MAX_VOLUME;
+import static org.apache.hadoop.ozone.ksm.exceptions
+    .KSMException.ResultCodes;
+
+/**
+ * KSM volume management code.
+ */
+public class VolumeManagerImpl implements VolumeManager {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(VolumeManagerImpl.class);
+
+  private final KSMMetadataManager metadataManager;
+  private final int maxUserVolumeCount;
+
+  /**
+   * Constructor.
+   * @param metadataManager - KSM metadata manager.
+   * @param conf - Ozone configuration.
+   * @throws IOException
+   */
+  public VolumeManagerImpl(KSMMetadataManager metadataManager,
+      OzoneConfiguration conf) throws IOException {
+    this.metadataManager = metadataManager;
+    this.maxUserVolumeCount = conf.getInt(OZONE_KSM_USER_MAX_VOLUME,
+        OZONE_KSM_USER_MAX_VOLUME_DEFAULT);
+  }
+
+  // Helpers to add and delete volume from user list
+  private void addVolumeToOwnerList(String volume, String owner,
+      BatchOperation batchOperation) throws IOException {
+    // Get the volume list
+    byte[] dbUserKey = metadataManager.getUserKey(owner);
+    byte[] volumeList  = metadataManager.get(dbUserKey);
+    List<String> prevVolList = new LinkedList<>();
+    if (volumeList != null) {
+      VolumeList vlist = VolumeList.parseFrom(volumeList);
+      prevVolList.addAll(vlist.getVolumeNamesList());
+    }
+
+    // Check the volume count
+    if (prevVolList.size() >= maxUserVolumeCount) {
+      LOG.debug("Too many volumes for user:{}", owner);
+      throw new KSMException(ResultCodes.FAILED_TOO_MANY_USER_VOLUMES);
+    }
+
+    // Add the new volume to the list
+    prevVolList.add(volume);
+    VolumeList newVolList = VolumeList.newBuilder()
+        .addAllVolumeNames(prevVolList).build();
+    batchOperation.put(dbUserKey, newVolList.toByteArray());
+  }
+
+  private void delVolumeFromOwnerList(String volume, String owner,
+                                      BatchOperation batchOperation)
+      throws IOException {
+    // Get the volume list
+    byte[] dbUserKey = metadataManager.getUserKey(owner);
+    byte[] volumeList  = metadataManager.get(dbUserKey);
+    List<String> prevVolList = new LinkedList<>();
+    if (volumeList != null) {
+      VolumeList vlist = VolumeList.parseFrom(volumeList);
+      prevVolList.addAll(vlist.getVolumeNamesList());
+    } else {
+      LOG.debug("volume:{} not found for user:{}");
+      throw new KSMException(ResultCodes.FAILED_USER_NOT_FOUND);
+    }
+
+    // Remove the volume from the list
+    prevVolList.remove(volume);
+    if (prevVolList.size() == 0) {
+      batchOperation.delete(dbUserKey);
+    } else {
+      VolumeList newVolList = VolumeList.newBuilder()
+          .addAllVolumeNames(prevVolList).build();
+      batchOperation.put(dbUserKey, newVolList.toByteArray());
+    }
+  }
+
+  /**
+   * Creates a volume.
+   * @param args - KsmVolumeArgs.
+   */
+  @Override
+  public void createVolume(KsmVolumeArgs args) throws IOException {
+    Preconditions.checkNotNull(args);
+    metadataManager.writeLock().lock();
+    try {
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(args.getVolume());
+      byte[] volumeInfo = metadataManager.get(dbVolumeKey);
+
+      // Check if the volume already exists
+      if (volumeInfo != null) {
+        LOG.debug("volume:{} already exists", args.getVolume());
+        throw new KSMException(ResultCodes.FAILED_VOLUME_ALREADY_EXISTS);
+      }
+
+      BatchOperation batch = new BatchOperation();
+      // Write the vol info
+      List<HddsProtos.KeyValue> metadataList = new LinkedList<>();
+      for (Map.Entry<String, String> entry : args.getKeyValueMap().entrySet()) {
+        metadataList.add(HddsProtos.KeyValue.newBuilder()
+            .setKey(entry.getKey()).setValue(entry.getValue()).build());
+      }
+      List<OzoneAclInfo> aclList = args.getAclMap().ozoneAclGetProtobuf();
+
+      VolumeInfo newVolumeInfo = VolumeInfo.newBuilder()
+          .setAdminName(args.getAdminName())
+          .setOwnerName(args.getOwnerName())
+          .setVolume(args.getVolume())
+          .setQuotaInBytes(args.getQuotaInBytes())
+          .addAllMetadata(metadataList)
+          .addAllVolumeAcls(aclList)
+          .setCreationTime(Time.now())
+          .build();
+      batch.put(dbVolumeKey, newVolumeInfo.toByteArray());
+
+      // Add volume to user list
+      addVolumeToOwnerList(args.getVolume(), args.getOwnerName(), batch);
+      metadataManager.writeBatch(batch);
+      LOG.debug("created volume:{} user:{}", args.getVolume(),
+          args.getOwnerName());
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Volume creation failed for user:{} volume:{}",
+            args.getOwnerName(), args.getVolume(), ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Changes the owner of a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param owner - Name of the owner.
+   * @throws IOException
+   */
+  @Override
+  public void setOwner(String volume, String owner) throws IOException {
+    Preconditions.checkNotNull(volume);
+    Preconditions.checkNotNull(owner);
+    metadataManager.writeLock().lock();
+    try {
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
+      byte[] volInfo = metadataManager.get(dbVolumeKey);
+      if (volInfo == null) {
+        LOG.debug("Changing volume ownership failed for user:{} volume:{}",
+            owner, volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
+      KsmVolumeArgs volumeArgs = KsmVolumeArgs.getFromProtobuf(volumeInfo);
+      Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
+
+      BatchOperation batch = new BatchOperation();
+      delVolumeFromOwnerList(volume, volumeArgs.getOwnerName(), batch);
+      addVolumeToOwnerList(volume, owner, batch);
+
+      KsmVolumeArgs newVolumeArgs =
+          KsmVolumeArgs.newBuilder().setVolume(volumeArgs.getVolume())
+              .setAdminName(volumeArgs.getAdminName())
+              .setOwnerName(owner)
+              .setQuotaInBytes(volumeArgs.getQuotaInBytes())
+              .setCreationTime(volumeArgs.getCreationTime())
+              .build();
+
+      VolumeInfo newVolumeInfo = newVolumeArgs.getProtobuf();
+      batch.put(dbVolumeKey, newVolumeInfo.toByteArray());
+
+      metadataManager.writeBatch(batch);
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Changing volume ownership failed for user:{} volume:{}",
+            owner, volume, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Changes the Quota on a volume.
+   *
+   * @param volume - Name of the volume.
+   * @param quota - Quota in bytes.
+   * @throws IOException
+   */
+  public void setQuota(String volume, long quota) throws IOException {
+    Preconditions.checkNotNull(volume);
+    metadataManager.writeLock().lock();
+    try {
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
+      byte[] volInfo = metadataManager.get(dbVolumeKey);
+      if (volInfo == null) {
+        LOG.debug("volume:{} does not exist", volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
+      KsmVolumeArgs volumeArgs = KsmVolumeArgs.getFromProtobuf(volumeInfo);
+      Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
+
+      KsmVolumeArgs newVolumeArgs =
+          KsmVolumeArgs.newBuilder()
+              .setVolume(volumeArgs.getVolume())
+              .setAdminName(volumeArgs.getAdminName())
+              .setOwnerName(volumeArgs.getOwnerName())
+              .setQuotaInBytes(quota)
+              .setCreationTime(volumeArgs.getCreationTime()).build();
+
+      VolumeInfo newVolumeInfo = newVolumeArgs.getProtobuf();
+      metadataManager.put(dbVolumeKey, newVolumeInfo.toByteArray());
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Changing volume quota failed for volume:{} quota:{}", volume,
+            quota, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Gets the volume information.
+   * @param volume - Volume name.
+   * @return KsmVolumeArgs for the volume; an exception is thrown if the
+   * volume does not exist.
+   * @throws IOException
+   */
+  public KsmVolumeArgs getVolumeInfo(String volume) throws IOException {
+    Preconditions.checkNotNull(volume);
+    metadataManager.readLock().lock();
+    try {
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
+      byte[] volInfo = metadataManager.get(dbVolumeKey);
+      if (volInfo == null) {
+        LOG.debug("volume:{} does not exist", volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
+      KsmVolumeArgs volumeArgs = KsmVolumeArgs.getFromProtobuf(volumeInfo);
+      Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
+      return volumeArgs;
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.warn("Info volume failed for volume:{}", volume, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  /**
+   * Deletes an existing empty volume.
+   *
+   * @param volume - Name of the volume.
+   * @throws IOException
+   */
+  @Override
+  public void deleteVolume(String volume) throws IOException {
+    Preconditions.checkNotNull(volume);
+    metadataManager.writeLock().lock();
+    try {
+      BatchOperation batch = new BatchOperation();
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
+      byte[] volInfo = metadataManager.get(dbVolumeKey);
+      if (volInfo == null) {
+        LOG.debug("volume:{} does not exist", volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+
+      if (!metadataManager.isVolumeEmpty(volume)) {
+        LOG.debug("volume:{} is not empty", volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_EMPTY);
+      }
+
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
+      Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
+      // delete the volume from the owner list
+      // as well as delete the volume entry
+      delVolumeFromOwnerList(volume, volumeInfo.getOwnerName(), batch);
+      batch.delete(dbVolumeKey);
+      metadataManager.writeBatch(batch);
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Delete volume failed for volume:{}", volume, ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.writeLock().unlock();
+    }
+  }
+
+  /**
+   * Checks if the specified user with a role can access this volume.
+   *
+   * @param volume - volume
+   * @param userAcl - user acl which needs to be checked for access
+   * @return true if the user has access for the volume, false otherwise
+   * @throws IOException
+   */
+  public boolean checkVolumeAccess(String volume, OzoneAclInfo userAcl)
+      throws IOException {
+    Preconditions.checkNotNull(volume);
+    Preconditions.checkNotNull(userAcl);
+    metadataManager.readLock().lock();
+    try {
+      byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
+      byte[] volInfo = metadataManager.get(dbVolumeKey);
+      if (volInfo == null) {
+        LOG.debug("volume:{} does not exist", volume);
+        throw new KSMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
+      }
+
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
+      KsmVolumeArgs volumeArgs = KsmVolumeArgs.getFromProtobuf(volumeInfo);
+      Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
+      return volumeArgs.getAclMap().hasAccess(userAcl);
+    } catch (IOException ex) {
+      if (!(ex instanceof KSMException)) {
+        LOG.error("Check volume access failed for volume:{} user:{} rights:{}",
+            volume, userAcl.getName(), userAcl.getRights(), ex);
+      }
+      throw ex;
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<KsmVolumeArgs> listVolumes(String userName,
+      String prefix, String startKey, int maxKeys) throws IOException {
+    metadataManager.readLock().lock();
+    try {
+      return metadataManager.listVolumes(
+          userName, prefix, startKey, maxKeys);
+    } finally {
+      metadataManager.readLock().unlock();
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/KSMException.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/KSMException.java
new file mode 100644
index 0000000..e2f3580
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/KSMException.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm.exceptions;
+
+import java.io.IOException;
+
+/**
+ * Exception thrown by KSM.
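+ * <p>
+ * Callers typically switch on {@link #getResult()} to react to specific
+ * failures; a minimal handling sketch (caller-side names are illustrative):
+ * <pre>
+ *   try {
+ *     volumeManager.createVolume(args);
+ *   } catch (KSMException e) {
+ *     if (e.getResult() == KSMException.ResultCodes.FAILED_VOLUME_ALREADY_EXISTS) {
+ *       // volume already exists; report it back to the client
+ *     }
+ *   }
+ * </pre>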
+ */
+public class KSMException extends IOException {
+  private final KSMException.ResultCodes result;
+
+  /**
+   * Constructs a {@code KSMException} with {@code null}
+   * as its error detail message.
+   */
+  public KSMException(KSMException.ResultCodes result) {
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code KSMException} with the specified detail message.
+   *
+   * @param message The detail message (which is saved for later retrieval
+   * by the {@link #getMessage()} method)
+   */
+  public KSMException(String message, KSMException.ResultCodes result) {
+    super(message);
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code KSMException} with the specified detail message
+   * and cause.
+   * <p>
+   * Note that the detail message associated with {@code cause} is
+   * <i>not</i> automatically incorporated into this exception's detail
+   * message.
+   *
+   * @param message The detail message (which is saved for later retrieval
+   * by the {@link #getMessage()} method)
+   * @param cause The cause (which is saved for later retrieval by the
+   * {@link #getCause()} method). (A null value is permitted, and indicates
+   * that the cause is nonexistent or unknown.)
+   */
+  public KSMException(String message, Throwable cause,
+                      KSMException.ResultCodes result) {
+    super(message, cause);
+    this.result = result;
+  }
+
+  /**
+   * Constructs a {@code KSMException} with the specified cause and a
+   * detail message of {@code (cause==null ? null : cause.toString())}
+   * (which typically contains the class and detail message of {@code cause}).
+   * This constructor is useful for exceptions that are little more
+   * than wrappers for other throwables.
+   *
+   * @param cause The cause (which is saved for later retrieval by the
+   * {@link #getCause()} method). (A null value is permitted, and indicates
+   * that the cause is nonexistent or unknown.)
+   */
+  public KSMException(Throwable cause, KSMException.ResultCodes result) {
+    super(cause);
+    this.result = result;
+  }
+
+  /**
+   * Returns resultCode.
+   * @return ResultCode
+   */
+  public KSMException.ResultCodes getResult() {
+    return result;
+  }
+
+  /**
+   * Error codes to make it easy to decode these exceptions.
+   */
+  public enum ResultCodes {
+    FAILED_TOO_MANY_USER_VOLUMES,
+    FAILED_VOLUME_ALREADY_EXISTS,
+    FAILED_VOLUME_NOT_FOUND,
+    FAILED_VOLUME_NOT_EMPTY,
+    FAILED_USER_NOT_FOUND,
+    FAILED_BUCKET_ALREADY_EXISTS,
+    FAILED_BUCKET_NOT_FOUND,
+    FAILED_BUCKET_NOT_EMPTY,
+    FAILED_KEY_ALREADY_EXISTS,
+    FAILED_KEY_NOT_FOUND,
+    FAILED_KEY_ALLOCATION,
+    FAILED_KEY_DELETION,
+    FAILED_METADATA_ERROR,
+    FAILED_INTERNAL_ERROR,
+    KSM_NOT_INITIALIZED,
+    SCM_VERSION_MISMATCH_ERROR
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/package-info.java
new file mode 100644
index 0000000..09fd87f
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/exceptions/package-info.java
@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.ksm.exceptions;
+// Exception thrown by KSM.
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java
new file mode 100644
index 0000000..09d9f32
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+/*
+ This package contains the keyspace manager classes.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java
new file mode 100644
index 0000000..02a4120
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/KeySpaceManagerProtocolServerSideTranslatorPB.java
@@ -0,0 +1,539 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+import com.google.common.collect.Lists;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo;
+import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs;
+import org.apache.hadoop.ozone.ksm.helpers.OpenKeySession;
+import org.apache.hadoop.ozone.ksm.helpers.ServiceInfo;
+import org.apache.hadoop.ozone.ksm.protocol.KeySpaceManagerProtocol;
+import org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolPB;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.AllocateBlockRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.AllocateBlockResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CommitKeyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CommitKeyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetBucketPropertyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetBucketPropertyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteBucketRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteBucketResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CreateVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.LocateKeyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.LocateKeyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.KeyArgs;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetVolumePropertyRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.SetVolumePropertyResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CheckVolumeAccessRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.CheckVolumeAccessResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.InfoVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.DeleteVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListVolumeRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListVolumeResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListBucketsRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListBucketsResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListKeysRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ListKeysResponse;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.Status;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServiceListRequest;
+import org.apache.hadoop.ozone.protocol.proto
+    .KeySpaceManagerProtocolProtos.ServiceListResponse;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * This class is the server-side translator that forwards requests received on
+ * {@link org.apache.hadoop.ozone.ksm.protocolPB.KeySpaceManagerProtocolPB}
+ * to the KeySpaceManagerService server implementation.
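+ * <p>
+ * Construction sketch (the KSM server wiring itself is outside this class):
+ * <pre>
+ *   KeySpaceManagerProtocol ksm = ...; // the KSM implementation (assumed)
+ *   KeySpaceManagerProtocolPB translator =
+ *       new KeySpaceManagerProtocolServerSideTranslatorPB(ksm);
+ *   // The translator is then wrapped in the protobuf-generated blocking
+ *   // service and registered with the Hadoop RPC server.
+ * </pre>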
+ */
+public class KeySpaceManagerProtocolServerSideTranslatorPB implements
+    KeySpaceManagerProtocolPB {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(KeySpaceManagerProtocolServerSideTranslatorPB.class);
+  private final KeySpaceManagerProtocol impl;
+
+  /**
+   * Constructs an instance of the server handler.
+   *
+   * @param impl the KeySpaceManagerProtocol server implementation
+   */
+  public KeySpaceManagerProtocolServerSideTranslatorPB(
+      KeySpaceManagerProtocol impl) {
+    this.impl = impl;
+  }
+
+  // Convert an exception to the corresponding status code
+  private Status exceptionToResponseStatus(IOException ex) {
+    if (ex instanceof KSMException) {
+      KSMException ksmException = (KSMException)ex;
+      switch (ksmException.getResult()) {
+      case FAILED_VOLUME_ALREADY_EXISTS:
+        return Status.VOLUME_ALREADY_EXISTS;
+      case FAILED_TOO_MANY_USER_VOLUMES:
+        return Status.USER_TOO_MANY_VOLUMES;
+      case FAILED_VOLUME_NOT_FOUND:
+        return Status.VOLUME_NOT_FOUND;
+      case FAILED_VOLUME_NOT_EMPTY:
+        return Status.VOLUME_NOT_EMPTY;
+      case FAILED_USER_NOT_FOUND:
+        return Status.USER_NOT_FOUND;
+      case FAILED_BUCKET_ALREADY_EXISTS:
+        return Status.BUCKET_ALREADY_EXISTS;
+      case FAILED_BUCKET_NOT_FOUND:
+        return Status.BUCKET_NOT_FOUND;
+      case FAILED_BUCKET_NOT_EMPTY:
+        return Status.BUCKET_NOT_EMPTY;
+      case FAILED_KEY_ALREADY_EXISTS:
+        return Status.KEY_ALREADY_EXISTS;
+      case FAILED_KEY_NOT_FOUND:
+        return Status.KEY_NOT_FOUND;
+      default:
+        return Status.INTERNAL_ERROR;
+      }
+    } else {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Unknown error occurs", ex);
+      }
+      return Status.INTERNAL_ERROR;
+    }
+  }
+
+  @Override
+  public CreateVolumeResponse createVolume(
+      RpcController controller, CreateVolumeRequest request)
+      throws ServiceException {
+    CreateVolumeResponse.Builder resp = CreateVolumeResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    try {
+      impl.createVolume(KsmVolumeArgs.getFromProtobuf(request.getVolumeInfo()));
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public SetVolumePropertyResponse setVolumeProperty(
+      RpcController controller, SetVolumePropertyRequest request)
+      throws ServiceException {
+    SetVolumePropertyResponse.Builder resp =
+        SetVolumePropertyResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    String volume = request.getVolumeName();
+
+    try {
+      if (request.hasQuotaInBytes()) {
+        long quota = request.getQuotaInBytes();
+        impl.setQuota(volume, quota);
+      } else {
+        String owner = request.getOwnerName();
+        impl.setOwner(volume, owner);
+      }
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public CheckVolumeAccessResponse checkVolumeAccess(
+      RpcController controller, CheckVolumeAccessRequest request)
+      throws ServiceException {
+    CheckVolumeAccessResponse.Builder resp =
+        CheckVolumeAccessResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    try {
+      boolean access = impl.checkVolumeAccess(request.getVolumeName(),
+          request.getUserAcl());
+      // if no access, set the response status as access denied
+      if (!access) {
+        resp.setStatus(Status.ACCESS_DENIED);
+      }
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+
+    return resp.build();
+  }
+
+  @Override
+  public InfoVolumeResponse infoVolume(
+      RpcController controller, InfoVolumeRequest request)
+      throws ServiceException {
+    InfoVolumeResponse.Builder resp = InfoVolumeResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    String volume = request.getVolumeName();
+    try {
+      KsmVolumeArgs ret = impl.getVolumeInfo(volume);
+      resp.setVolumeInfo(ret.getProtobuf());
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public DeleteVolumeResponse deleteVolume(
+      RpcController controller, DeleteVolumeRequest request)
+      throws ServiceException {
+    DeleteVolumeResponse.Builder resp = DeleteVolumeResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    try {
+      impl.deleteVolume(request.getVolumeName());
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public ListVolumeResponse listVolumes(
+      RpcController controller, ListVolumeRequest request)
+      throws ServiceException {
+    ListVolumeResponse.Builder resp = ListVolumeResponse.newBuilder();
+    List<KsmVolumeArgs> result = Lists.newArrayList();
+    try {
+      if (request.getScope()
+          == ListVolumeRequest.Scope.VOLUMES_BY_USER) {
+        result = impl.listVolumeByUser(request.getUserName(),
+            request.getPrefix(), request.getPrevKey(), request.getMaxKeys());
+      } else if (request.getScope()
+          == ListVolumeRequest.Scope.VOLUMES_BY_CLUSTER) {
+        result = impl.listAllVolumes(request.getPrefix(), request.getPrevKey(),
+            request.getMaxKeys());
+      }
+
+      if (result == null) {
+        throw new ServiceException("Failed to get volumes for given scope "
+            + request.getScope());
+      }
+
+      result.forEach(item -> resp.addVolumeInfo(item.getProtobuf()));
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public CreateBucketResponse createBucket(
+      RpcController controller, CreateBucketRequest
+      request) throws ServiceException {
+    CreateBucketResponse.Builder resp =
+        CreateBucketResponse.newBuilder();
+    try {
+      impl.createBucket(KsmBucketInfo.getFromProtobuf(
+          request.getBucketInfo()));
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public InfoBucketResponse infoBucket(
+      RpcController controller, InfoBucketRequest request)
+      throws ServiceException {
+    InfoBucketResponse.Builder resp =
+        InfoBucketResponse.newBuilder();
+    try {
+      KsmBucketInfo ksmBucketInfo = impl.getBucketInfo(
+          request.getVolumeName(), request.getBucketName());
+      resp.setStatus(Status.OK);
+      resp.setBucketInfo(ksmBucketInfo.getProtobuf());
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public LocateKeyResponse createKey(
+      RpcController controller, LocateKeyRequest request
+  ) throws ServiceException {
+    LocateKeyResponse.Builder resp =
+        LocateKeyResponse.newBuilder();
+    try {
+      KeyArgs keyArgs = request.getKeyArgs();
+      HddsProtos.ReplicationType type =
+          keyArgs.hasType() ? keyArgs.getType() : null;
+      HddsProtos.ReplicationFactor factor =
+          keyArgs.hasFactor() ? keyArgs.getFactor() : null;
+      KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+          .setVolumeName(keyArgs.getVolumeName())
+          .setBucketName(keyArgs.getBucketName())
+          .setKeyName(keyArgs.getKeyName())
+          .setDataSize(keyArgs.getDataSize())
+          .setType(type)
+          .setFactor(factor)
+          .build();
+      if (keyArgs.hasDataSize()) {
+        ksmKeyArgs.setDataSize(keyArgs.getDataSize());
+      } else {
+        ksmKeyArgs.setDataSize(0);
+      }
+      OpenKeySession openKey = impl.openKey(ksmKeyArgs);
+      resp.setKeyInfo(openKey.getKeyInfo().getProtobuf());
+      resp.setID(openKey.getId());
+      resp.setOpenVersion(openKey.getOpenVersion());
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public LocateKeyResponse lookupKey(
+      RpcController controller, LocateKeyRequest request
+  ) throws ServiceException {
+    LocateKeyResponse.Builder resp =
+        LocateKeyResponse.newBuilder();
+    try {
+      KeyArgs keyArgs = request.getKeyArgs();
+      KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+          .setVolumeName(keyArgs.getVolumeName())
+          .setBucketName(keyArgs.getBucketName())
+          .setKeyName(keyArgs.getKeyName())
+          .build();
+      KsmKeyInfo keyInfo = impl.lookupKey(ksmKeyArgs);
+      resp.setKeyInfo(keyInfo.getProtobuf());
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public SetBucketPropertyResponse setBucketProperty(
+      RpcController controller, SetBucketPropertyRequest request)
+      throws ServiceException {
+    SetBucketPropertyResponse.Builder resp =
+        SetBucketPropertyResponse.newBuilder();
+    try {
+      impl.setBucketProperty(KsmBucketArgs.getFromProtobuf(
+          request.getBucketArgs()));
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public LocateKeyResponse deleteKey(RpcController controller,
+      LocateKeyRequest request) throws ServiceException {
+    LocateKeyResponse.Builder resp =
+        LocateKeyResponse.newBuilder();
+    try {
+      KeyArgs keyArgs = request.getKeyArgs();
+      KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+          .setVolumeName(keyArgs.getVolumeName())
+          .setBucketName(keyArgs.getBucketName())
+          .setKeyName(keyArgs.getKeyName())
+          .build();
+      impl.deleteKey(ksmKeyArgs);
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public DeleteBucketResponse deleteBucket(
+      RpcController controller, DeleteBucketRequest request)
+      throws ServiceException {
+    DeleteBucketResponse.Builder resp = DeleteBucketResponse.newBuilder();
+    resp.setStatus(Status.OK);
+    try {
+      impl.deleteBucket(request.getVolumeName(), request.getBucketName());
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public ListBucketsResponse listBuckets(
+      RpcController controller, ListBucketsRequest request)
+      throws ServiceException {
+    ListBucketsResponse.Builder resp =
+        ListBucketsResponse.newBuilder();
+    try {
+      List<KsmBucketInfo> buckets = impl.listBuckets(
+          request.getVolumeName(),
+          request.getStartKey(),
+          request.getPrefix(),
+          request.getCount());
+      for(KsmBucketInfo bucket : buckets) {
+        resp.addBucketInfo(bucket.getProtobuf());
+      }
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public ListKeysResponse listKeys(RpcController controller,
+      ListKeysRequest request) throws ServiceException {
+    ListKeysResponse.Builder resp =
+        ListKeysResponse.newBuilder();
+    try {
+      List<KsmKeyInfo> keys = impl.listKeys(
+          request.getVolumeName(),
+          request.getBucketName(),
+          request.getStartKey(),
+          request.getPrefix(),
+          request.getCount());
+      for(KsmKeyInfo key : keys) {
+        resp.addKeyInfo(key.getProtobuf());
+      }
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public CommitKeyResponse commitKey(RpcController controller,
+      CommitKeyRequest request) throws ServiceException {
+    CommitKeyResponse.Builder resp =
+        CommitKeyResponse.newBuilder();
+    try {
+      KeyArgs keyArgs = request.getKeyArgs();
+      HddsProtos.ReplicationType type =
+          keyArgs.hasType() ? keyArgs.getType() : null;
+      HddsProtos.ReplicationFactor factor =
+          keyArgs.hasFactor() ? keyArgs.getFactor() : null;
+      KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+          .setVolumeName(keyArgs.getVolumeName())
+          .setBucketName(keyArgs.getBucketName())
+          .setKeyName(keyArgs.getKeyName())
+          .setDataSize(keyArgs.getDataSize())
+          .setType(type)
+          .setFactor(factor)
+          .build();
+      int id = request.getClientID();
+      impl.commitKey(ksmKeyArgs, id);
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public AllocateBlockResponse allocateBlock(RpcController controller,
+      AllocateBlockRequest request) throws ServiceException {
+    AllocateBlockResponse.Builder resp =
+        AllocateBlockResponse.newBuilder();
+    try {
+      KeyArgs keyArgs = request.getKeyArgs();
+      HddsProtos.ReplicationType type =
+          keyArgs.hasType() ? keyArgs.getType() : null;
+      HddsProtos.ReplicationFactor factor =
+          keyArgs.hasFactor() ? keyArgs.getFactor() : null;
+      KsmKeyArgs ksmKeyArgs = new KsmKeyArgs.Builder()
+          .setVolumeName(keyArgs.getVolumeName())
+          .setBucketName(keyArgs.getBucketName())
+          .setKeyName(keyArgs.getKeyName())
+          .setType(type)
+          .setFactor(factor)
+          .build();
+      int id = request.getClientID();
+      KsmKeyLocationInfo newLocation = impl.allocateBlock(ksmKeyArgs, id);
+      resp.setKeyLocation(newLocation.getProtobuf());
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+
+  @Override
+  public ServiceListResponse getServiceList(RpcController controller,
+      ServiceListRequest request) throws ServiceException {
+    ServiceListResponse.Builder resp = ServiceListResponse.newBuilder();
+    try {
+      resp.addAllServiceInfo(impl.getServiceList().stream()
+          .map(ServiceInfo::getProtobuf)
+          .collect(Collectors.toList()));
+      resp.setStatus(Status.OK);
+    } catch (IOException e) {
+      resp.setStatus(exceptionToResponseStatus(e));
+    }
+    return resp.build();
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
new file mode 100644
index 0000000..e9c2430
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/protocolPB/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.protocolPB;
+
+/**
+ * KSM protocol buffer translators.
+ */
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Handler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Handler.java
new file mode 100644
index 0000000..99020c8
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Handler.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClient;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.http.client.utils.URIBuilder;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Common interface for command handling.
+ */
+public abstract class Handler {
+
+  protected OzoneRestClient client;
+
+  /**
+   * Constructs a client object.
+   */
+  public Handler() {
+    client = new OzoneRestClient();
+  }
+
+  /**
+   * Executes the Client command.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  protected abstract void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException;
+
+  /**
+   * Verifies the user-provided URI.
+   *
+   * @param uri - UriString
+   * @return URI
+   * @throws URISyntaxException
+   * @throws OzoneException
+   */
+  protected URI verifyURI(String uri) throws URISyntaxException,
+      OzoneException {
+    if ((uri == null) || uri.isEmpty()) {
+      throw new OzoneRestClientException(
+          "Ozone URI is needed to execute this command.");
+    }
+    URIBuilder ozoneURI = new URIBuilder(uri);
+
+    if (ozoneURI.getPort() == 0) {
+      ozoneURI.setPort(Shell.DEFAULT_OZONE_PORT);
+    }
+    return ozoneURI.build();
+  }
+
+
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
new file mode 100644
index 0000000..2aec0fc
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
@@ -0,0 +1,415 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.bucket.UpdateBucketHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.DeleteKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.GetKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.InfoKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.ListKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.PutKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.volume.CreateVolumeHandler;
+import org.apache.hadoop.ozone.web.ozShell.volume.DeleteVolumeHandler;
+import org.apache.hadoop.ozone.web.ozShell.volume.InfoVolumeHandler;
+import org.apache.hadoop.ozone.web.ozShell.volume.ListVolumeHandler;
+import org.apache.hadoop.ozone.web.ozShell.volume.UpdateVolumeHandler;
+import org.apache.hadoop.ozone.web.ozShell.bucket.CreateBucketHandler;
+import org.apache.hadoop.ozone.web.ozShell.bucket.DeleteBucketHandler;
+import org.apache.hadoop.ozone.web.ozShell.bucket.InfoBucketHandler;
+import org.apache.hadoop.ozone.web.ozShell.bucket.ListBucketHandler;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Ozone user interface commands.
+ *
+ * This class uses the dispatch method to route calls
+ * to the appropriate handlers that execute the Ozone functions.
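+ *
+ * Example invocations (URIs are placeholders; see the per-option help text
+ * below for the exact forms):
+ * <pre>
+ *   ozone oz -createVolume &lt;volumeURI&gt; -root -user bilbo
+ *   ozone oz -listVolume &lt;ozoneURI&gt; -user bilbo -root
+ *   ozone oz -createBucket &lt;bucketURI&gt;
+ *   ozone oz -updateBucket &lt;bucketURI&gt; -addAcl user:frodo:rw
+ * </pre>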
+ */
+public class Shell extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(Shell.class);
+
+  // General options
+  public static final int DEFAULT_OZONE_PORT = 50070;
+  public static final String VERBOSE = "v";
+
+  // volume related command line arguments
+  public static final String RUNAS = "root";
+  public static final String USER = "user";
+  public static final String OWNER = "owner";
+  public static final String QUOTA = "quota";
+  public static final String CREATE_VOLUME = "createVolume";
+  public static final String UPDATE_VOLUME = "updateVolume";
+  public static final String DELETE_VOLUME = "deleteVolume";
+  public static final String LIST_VOLUME = "listVolume";
+  public static final String INFO_VOLUME = "infoVolume";
+
+  // bucket related command line arguments
+  public static final String CREATE_BUCKET = "createBucket";
+  public static final String UPDATE_BUCKET = "updateBucket";
+  public static final String DELETE_BUCKET = "deleteBucket";
+  public static final String LIST_BUCKET = "listBucket";
+  public static final String INFO_BUCKET = "infoBucket";
+  public static final String ADD_ACLS = "addAcl";
+  public static final String REMOVE_ACLS = "removeAcl";
+  // TODO : Support versioning and StorageType for buckets
+
+  //Object related command line arguments
+  public static final String PUT_KEY = "putKey";
+  public static final String GET_KEY = "getKey";
+  public static final String INFO_KEY = "infoKey";
+  public static final String DELETE_KEY = "deleteKey";
+  public static final String LIST_KEY = "listKey";
+  public static final String FILE = "file";
+
+  // Listing related command line arguments
+  public static final String LIST_LENGTH = "length";
+  public static final String START = "start";
+  public static final String PREFIX = "prefix";
+
+  /**
+   * Main for the ozShell Command handling.
+   *
+   * @param argv - System Args Strings[]
+   * @throws Exception
+   */
+  public static void main(String[] argv) throws Exception {
+    Shell shell = new Shell();
+    Configuration conf = new OzoneConfiguration();
+    conf.setQuietMode(false);
+    shell.setConf(conf);
+    int res = 0;
+    try {
+      res = ToolRunner.run(shell, argv);
+    } catch (Exception ex) {
+      System.err.println("ERROR: " + ex.getMessage());
+      System.exit(1);
+    }
+    System.exit(res);
+  }
+
+  /**
+   * Execute the command with the given arguments.
+   *
+   * @param args command specific arguments.
+   *
+   * @return exit code.
+   *
+   * @throws Exception
+   */
+  @Override
+  public int run(String[] args) throws Exception {
+    Options opts = getOpts();
+    CommandLine cmd = parseArgs(args, opts);
+    return dispatch(cmd, opts);
+  }
+
+  /**
+   * returns the Command Line Options.
+   *
+   * @return Options
+   */
+  private Options getOpts() {
+    Options opts = new Options();
+    addVolumeCommands(opts);
+    addBucketCommands(opts);
+    addKeyCommands(opts);
+    addListingCommands(opts);
+    return opts;
+  }
+
+  /**
+   * This function parses all command line arguments
+   * and returns the appropriate values.
+   *
+   * @param argv - Argv from main
+   *
+   * @return CommandLine
+   */
+  private CommandLine parseArgs(String[] argv, Options opts)
+      throws org.apache.commons.cli.ParseException {
+    try {
+      BasicParser parser = new BasicParser();
+      return parser.parse(opts, argv);
+    } catch (ParseException ex) {
+      System.out.printf("%s%n", ex.getMessage());
+    }
+
+    return null;
+  }
+
+
+  /**
+   * All volume related commands are added in this function for the command
+   * parser.
+   *
+   * @param options - Command Options class.
+   */
+  private void addVolumeCommands(Options options) {
+    Option verbose = new Option(VERBOSE, false, "verbose information output.");
+    options.addOption(verbose);
+
+    Option runas = new Option(RUNAS, false, "Run the command as \"hdfs\" user");
+    options.addOption(runas);
+
+    Option userName = new Option(USER, true,
+                                 "Name of the user in volume management " +
+                                     "functions");
+    options.addOption(userName);
+
+    Option quota = new Option(QUOTA, true, "Quota for the volume. E.g. 10TB");
+    options.addOption(quota);
+
+
+    Option createVolume = new Option(CREATE_VOLUME, true, "creates a volume " +
+        "for the specified user.\n \t For example : ozone oz -createVolume " +
+        "<volumeURI> -root -user <userName>\n");
+    options.addOption(createVolume);
+
+    Option deleteVolume = new Option(DELETE_VOLUME, true, "deletes a volume " +
+        "if it is empty.\n \t For example : ozone oz -deleteVolume <volumeURI>" +
+        " -root \n");
+    options.addOption(deleteVolume);
+
+    Option listVolume =
+        new Option(LIST_VOLUME, true, "List the volumes of a given user.\n" +
+            "For example : ozone oz -listVolume <ozoneURI>" +
+            "-user <username> -root or ozone oz " +
+            "-listVolume");
+    options.addOption(listVolume);
+
+    Option updateVolume =
+        new Option(UPDATE_VOLUME, true, "updates an existing volume.\n" +
+            "\t For example : ozone oz " +
+            "-updateVolume <volumeURI> -quota " +
+            "100TB\n");
+    options.addOption(updateVolume);
+
+    Option infoVolume = new Option(INFO_VOLUME, true,
+                                   "returns information about a specific " +
+                                       "volume.");
+    options.addOption(infoVolume);
+  }
+
+  /**
+   * All bucket related commands for ozone.
+   *
+   * @param opts - Options
+   */
+  private void addBucketCommands(Options opts) {
+    Option createBucket = new Option(CREATE_BUCKET, true,
+        "creates a bucket in a given volume." +
+        "For example: ozone oz -createBucket <bucketURI>");
+    opts.addOption(createBucket);
+
+    Option infoBucket =
+        new Option(INFO_BUCKET, true, "returns information about a bucket.");
+    opts.addOption(infoBucket);
+
+    Option deleteBucket =
+        new Option(DELETE_BUCKET, true, "deletes an empty bucket.");
+    opts.addOption(deleteBucket);
+
+    Option listBucket =
+        new Option(LIST_BUCKET, true, "lists the buckets in a volume.");
+    opts.addOption(listBucket);
+
+    Option updateBucket =
+        new Option(UPDATE_BUCKET, true, "allows changing bucket attributes.\n" +
+            " For example: ozone oz -updateBucket <bucketURI> " +
+            "-addAcl user:frodo:rw");
+    opts.addOption(updateBucket);
+
+    Option addAcl =
+        new Option(ADD_ACLS, true, "allows user to add acls to a bucket.");
+    opts.addOption(addAcl);
+
+    Option removeAcl =
+        new Option(REMOVE_ACLS, true, "allows user to remove acls from a " +
+            "bucket.");
+    opts.addOption(removeAcl);
+  }
+
+  /**
+   * All key commands.
+   *
+   * @param opts - options
+   */
+  private void addKeyCommands(Options opts) {
+    Option putKey =
+        new Option(PUT_KEY, true, "creates or overwrites an existing key");
+    opts.addOption(putKey);
+
+    Option deleteKey =
+        new Option(DELETE_KEY, true, "deletes an existing key");
+    opts.addOption(deleteKey);
+
+    Option infoKey =
+        new Option(INFO_KEY, true, "returns information about an existing key");
+    opts.addOption(infoKey);
+
+    Option listKey =
+        new Option(LIST_KEY, true, "list all keys in a given bucket");
+    opts.addOption(listKey);
+
+    Option getKey =
+        new Option(GET_KEY, true, "Gets a specific key from ozone server.");
+    opts.addOption(getKey);
+
+    Option fileArgument =
+        new Option(FILE, true, "Data file path");
+    opts.addOption(fileArgument);
+
+  }
+
+  /**
+   * Options that modify the listing commands.
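+   * <p>
+   * These options combine with the list commands, for example (the bucket
+   * URI is a placeholder):
+   * <pre>
+   *   ozone oz -listKey &lt;bucketURI&gt; -length 10 -prefix key- -start key100
+   * </pre>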
+   * @param opts
+   */
+  private void addListingCommands(Options opts) {
+    Option maxKeys = new Option(LIST_LENGTH, true,
+        "Specify the max length of listing result.");
+    opts.addOption(maxKeys);
+
+    Option prevKey = new Option(START, true,
+        "Specify the start key where to start listing from.");
+    opts.addOption(prevKey);
+
+    Option prefix = new Option(PREFIX, true,
+        "Specify the prefix to filter the listing result.");
+    opts.addOption(prefix);
+  }
+
+  /**
+   * Dispatches calls to the right command Handler classes.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  private int dispatch(CommandLine cmd, Options opts)
+      throws IOException, OzoneException, URISyntaxException {
+    Handler handler = null;
+    final int eightyColumn = 80;
+
+    try {
+
+      // volume functions
+      if (cmd.hasOption(Shell.CREATE_VOLUME)) {
+        handler = new CreateVolumeHandler();
+      }
+
+      if (cmd.hasOption(Shell.DELETE_VOLUME)) {
+        handler = new DeleteVolumeHandler();
+      }
+
+      if (cmd.hasOption(Shell.LIST_VOLUME)) {
+        handler = new ListVolumeHandler();
+      }
+
+      if (cmd.hasOption(Shell.UPDATE_VOLUME)) {
+        handler = new UpdateVolumeHandler();
+      }
+
+      if (cmd.hasOption(Shell.INFO_VOLUME)) {
+        handler = new InfoVolumeHandler();
+      }
+
+      // bucket functions
+      if (cmd.hasOption(Shell.CREATE_BUCKET)) {
+        handler = new CreateBucketHandler();
+      }
+
+      if (cmd.hasOption(Shell.DELETE_BUCKET)) {
+        handler = new DeleteBucketHandler();
+      }
+
+      if (cmd.hasOption(Shell.INFO_BUCKET)) {
+        handler = new InfoBucketHandler();
+      }
+
+      if (cmd.hasOption(Shell.LIST_BUCKET)) {
+        handler = new ListBucketHandler();
+      }
+
+      if (cmd.hasOption(Shell.UPDATE_BUCKET)) {
+        handler = new UpdateBucketHandler();
+      }
+
+      // Key Functions
+
+      if (cmd.hasOption(Shell.PUT_KEY)) {
+        handler = new PutKeyHandler();
+      }
+
+      if (cmd.hasOption(Shell.DELETE_KEY)) {
+        handler = new DeleteKeyHandler();
+      }
+
+      if (cmd.hasOption(Shell.INFO_KEY)) {
+        handler = new InfoKeyHandler();
+      }
+
+      if (cmd.hasOption(Shell.LIST_KEY)) {
+        handler = new ListKeyHandler();
+      }
+
+      if (cmd.hasOption(Shell.GET_KEY)) {
+        handler = new GetKeyHandler();
+      }
+
+      if (handler != null) {
+        handler.execute(cmd);
+        return 0;
+      } else {
+        HelpFormatter helpFormatter = new HelpFormatter();
+        helpFormatter.printHelp(eightyColumn, "ozone oz -command uri [args]",
+            "Ozone Commands",
+            opts, "Please correct your command and try again.");
+        return 1;
+      }
+    } catch (IOException | URISyntaxException ex) {
+      System.err.printf("Command Failed : %s%n", ex.getMessage());
+    } catch (OzoneException ex) {
+      System.err.printf("Command Failed : %s%n", ex.toJsonString());
+      LOG.debug("Command Failed.", ex);
+    } catch (IllegalArgumentException ex) {
+      System.err.printf("Illegal argument: %s%n", ex.getMessage());
+    }
+    return 1;
+  }
+}
+
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/CreateBucketHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/CreateBucketHandler.java
new file mode 100644
index 0000000..d1c46b5
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/CreateBucketHandler.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.ozShell.bucket;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Create bucket handler.
+ */
+public class CreateBucketHandler extends Handler {
+
+  private String volumeName;
+  private String bucketName;
+  private String rootName;
+
+  /**
+   * Executes create bucket.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.CREATE_BUCKET)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : createBucket is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.CREATE_BUCKET);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 2) {
+      throw new OzoneRestClientException(
+          "volume and bucket name required in createBucket");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    OzoneBucket bucket = vol.createBucket(bucketName);
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+          bucket.getBucketInfo().toJsonString()));
+    }
+  }
+}
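CreateBucketHandler pulls the volume and bucket names out of the URI path with java.nio rather than manual string splitting: getPath() yields something like /volumeName/bucketName, and getNameCount()/getName(i) take it apart. A tiny stand-alone illustration with made-up names:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public final class PathParsingSketch {
      public static void main(String[] args) {
        Path path = Paths.get("/vol1/bucket1");   // shape of URI.getPath() for a bucket URI
        System.out.println(path.getNameCount());  // 2 -> both volume and bucket are present
        System.out.println(path.getName(0));      // vol1    (volume name)
        System.out.println(path.getName(1));      // bucket1 (bucket name)
      }
    }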
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/DeleteBucketHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/DeleteBucketHandler.java
new file mode 100644
index 0000000..181f6cc
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/DeleteBucketHandler.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.bucket;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Delete bucket Handler.
+ */
+public class DeleteBucketHandler extends Handler {
+  private String volumeName;
+  private String bucketName;
+  private String rootName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.DELETE_BUCKET)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : deleteBucket is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.DELETE_BUCKET);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 2) {
+      throw new OzoneRestClientException(
+          "volume and bucket name required in delete Bucket");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    vol.deleteBucket(bucketName);
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/InfoBucketHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/InfoBucketHandler.java
new file mode 100644
index 0000000..321c4c2
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/InfoBucketHandler.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.ozShell.bucket;
+
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Executes Info bucket.
+ */
+public class InfoBucketHandler extends Handler {
+  private String volumeName;
+  private String bucketName;
+  private String rootName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.INFO_BUCKET)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : infoBucket is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.INFO_BUCKET);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+
+    if (path.getNameCount() < 2) {
+      throw new OzoneRestClientException(
+          "volume and bucket name required in info Bucket");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    OzoneBucket bucket = vol.getBucket(bucketName);
+
+    System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        bucket.getBucketInfo().toJsonString()));
+  }
+
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/ListBucketHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/ListBucketHandler.java
new file mode 100644
index 0000000..ea77cae
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/ListBucketHandler.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.bucket;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.response.BucketInfo;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Executes List Bucket.
+ */
+public class ListBucketHandler extends Handler {
+  private String volumeName;
+  private String rootName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.LIST_BUCKET)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : listBucket is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.LIST_BUCKET);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 1) {
+      throw new OzoneRestClientException("volume is required in listBucket");
+    }
+
+    volumeName = path.getName(0).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    String length = null;
+    if (cmd.hasOption(Shell.LIST_LENGTH)) {
+      length = cmd.getOptionValue(Shell.LIST_LENGTH);
+      OzoneUtils.verifyMaxKeyLength(length);
+    }
+
+    String startBucket = null;
+    if (cmd.hasOption(Shell.START)) {
+      startBucket = cmd.getOptionValue(Shell.START);
+    }
+
+    String prefix = null;
+    if (cmd.hasOption(Shell.PREFIX)) {
+      prefix = cmd.getOptionValue(Shell.PREFIX);
+    }
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    List<OzoneBucket> bucketList = vol.listBuckets(length, startBucket, prefix);
+
+    List<BucketInfo> jsonData = bucketList.stream()
+        .map(OzoneBucket::getBucketInfo).collect(Collectors.toList());
+    System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        JsonUtils.toJsonString(jsonData)));
+  }
+}
+
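ListBucketHandler's output step is a plain stream map/collect followed by a single pretty-printed println; the same shape is shown here on plain strings so the sketch runs without the Ozone client classes:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public final class StreamMapSketch {
      public static void main(String[] args) {
        List<String> buckets = Arrays.asList("b1", "b2", "b3");
        // map each element to the value to serialize, collect to a List, print once
        List<String> mapped = buckets.stream()
            .map(String::toUpperCase)
            .collect(Collectors.toList());
        System.out.println(mapped);  // [B1, B2, B3]
      }
    }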
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/UpdateBucketHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/UpdateBucketHandler.java
new file mode 100644
index 0000000..781a00c
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/UpdateBucketHandler.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.web.ozShell.bucket;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Allows users to add and remove ACLs from a bucket.
+ */
+public class UpdateBucketHandler extends Handler {
+  private String volumeName;
+  private String bucketName;
+  private String rootName;
+
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.UPDATE_BUCKET)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : updateBucket is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.UPDATE_BUCKET);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+
+    if (path.getNameCount() < 2) {
+      throw new OzoneRestClientException(
+          "volume and bucket name required in update bucket");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    if (cmd.hasOption(Shell.ADD_ACLS)) {
+      String aclString = cmd.getOptionValue(Shell.ADD_ACLS);
+      String[] aclArray = aclString.split(",");
+      vol.addAcls(bucketName, aclArray);
+    }
+
+    if (cmd.hasOption(Shell.REMOVE_ACLS)) {
+      String aclString = cmd.getOptionValue(Shell.REMOVE_ACLS);
+      String[] aclArray = aclString.split(",");
+      vol.removeAcls(bucketName, aclArray);
+    }
+
+    OzoneBucket bucket = vol.getBucket(bucketName);
+    System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        bucket.getBucketInfo().toJsonString()));
+  }
+}
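UpdateBucketHandler accepts the Shell.ADD_ACLS/Shell.REMOVE_ACLS value as one comma-separated string and splits it before handing the array to addAcls/removeAcls. A tiny illustration (the type:name:rights ACL layout shown is an assumption for the example, not something this patch defines):

    public final class AclSplitSketch {
      public static void main(String[] args) {
        String aclString = "user:bilbo:rw,group:wizards:r";  // hypothetical ACL list
        String[] aclArray = aclString.split(",");
        for (String acl : aclArray) {
          System.out.println(acl);  // user:bilbo:rw, then group:wizards:r
        }
      }
    }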
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/package-info.java
new file mode 100644
index 0000000..c344c35
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/bucket/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Ozone shell (ozShell) command handlers for bucket operations.
+ */
+package org.apache.hadoop.ozone.web.ozShell.bucket;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/DeleteKeyHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/DeleteKeyHandler.java
new file mode 100644
index 0000000..1ad2588
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/DeleteKeyHandler.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.keys;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Executes Delete Key.
+ */
+public class DeleteKeyHandler extends Handler {
+  private String userName;
+  private String volumeName;
+  private String bucketName;
+  private String keyName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.DELETE_KEY)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : deleteKey is missing");
+    }
+
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+
+    String ozoneURIString = cmd.getOptionValue(Shell.DELETE_KEY);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 3) {
+      throw new OzoneRestClientException(
+          "volume/bucket/key name required in deleteKey");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+    keyName = path.getName(2).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+      System.out.printf("Key Name : %s%n", keyName);
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(userName);
+
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    OzoneBucket bucket = vol.getBucket(bucketName);
+    bucket.deleteKey(keyName);
+
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/GetKeyHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/GetKeyHandler.java
new file mode 100644
index 0000000..a56bbc0
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/GetKeyHandler.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.keys;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Gets an existing key.
+ */
+public class GetKeyHandler extends Handler {
+  private String userName;
+  private String volumeName;
+  private String bucketName;
+  private String keyName;
+
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.GET_KEY)) {
+      throw new OzoneRestClientException("Incorrect call : getKey is missing");
+    }
+
+    if (!cmd.hasOption(Shell.FILE)) {
+      throw new OzoneRestClientException(
+          "get key needs a file path to download to");
+    }
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+
+    String ozoneURIString = cmd.getOptionValue(Shell.GET_KEY);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 3) {
+      throw new OzoneRestClientException(
+          "volume/bucket/key name required in putKey");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+    keyName = path.getName(2).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+      System.out.printf("Key Name : %s%n", keyName);
+    }
+
+
+    String fileName = cmd.getOptionValue(Shell.FILE);
+    Path dataFilePath = Paths.get(fileName);
+    File dataFile = new File(fileName);
+
+
+    if (dataFile.exists()) {
+      throw new OzoneRestClientException(fileName +
+                                         " exists. Download will overwrite an " +
+                                         "existing file. Aborting.");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(userName);
+
+    client.getKey(volumeName, bucketName, keyName, dataFilePath);
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      try (FileInputStream stream = new FileInputStream(dataFile)) {
+        System.out.printf("Downloaded file hash : %s%n",
+            DigestUtils.md5Hex(stream));
+      }
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/InfoKeyHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/InfoKeyHandler.java
new file mode 100644
index 0000000..3e9b2da
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/InfoKeyHandler.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.keys;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneBucket;
+import org.apache.hadoop.ozone.web.client.OzoneKey;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+/**
+ * Executes Info Key.
+ */
+public class InfoKeyHandler extends Handler {
+  private String userName;
+  private String volumeName;
+  private String bucketName;
+  private String keyName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.INFO_KEY)) {
+      throw new OzoneRestClientException("Incorrect call : infoKey is missing");
+    }
+
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+
+    String ozoneURIString = cmd.getOptionValue(Shell.INFO_KEY);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 3) {
+      throw new OzoneRestClientException(
+          "volume/bucket/key name required in infoKey");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+    keyName = path.getName(2).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+      System.out.printf("Key Name : %s%n", keyName);
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(userName);
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    OzoneBucket bucket = vol.getBucket(bucketName);
+    OzoneKey key = bucket.getKeyInfo(keyName);
+
+    System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        key.getObjectInfo().toJsonString()));
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/ListKeyHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/ListKeyHandler.java
new file mode 100644
index 0000000..ea563ad
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/ListKeyHandler.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.keys;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneKey;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.response.KeyInfo;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Executes List Keys.
+ */
+public class ListKeyHandler extends Handler {
+  private String userName;
+  private String volumeName;
+  private String bucketName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+
+    if (!cmd.hasOption(Shell.LIST_KEY)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : listKey is missing");
+    }
+
+    String length = null;
+    if (cmd.hasOption(Shell.LIST_LENGTH)) {
+      length = cmd.getOptionValue(Shell.LIST_LENGTH);
+      OzoneUtils.verifyMaxKeyLength(length);
+    }
+
+    String startKey = null;
+    if (cmd.hasOption(Shell.START)) {
+      startKey = cmd.getOptionValue(Shell.START);
+    }
+
+    String prefix = null;
+    if (cmd.hasOption(Shell.PREFIX)) {
+      prefix = cmd.getOptionValue(Shell.PREFIX);
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.LIST_KEY);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 2) {
+      throw new OzoneRestClientException(
+          "volume/bucket is required in listKey");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("bucket Name : %s%n", bucketName);
+    }
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(userName);
+
+    List<OzoneKey> keys = client.listKeys(volumeName, bucketName, length,
+        startKey, prefix);
+
+    List<KeyInfo> jsonData = keys.stream()
+        .map(OzoneKey::getObjectInfo).collect(Collectors.toList());
+    System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+        JsonUtils.toJsonString(jsonData)));
+  }
+
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/PutKeyHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/PutKeyHandler.java
new file mode 100644
index 0000000..1f2c692
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/PutKeyHandler.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.keys;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Puts a file into an ozone bucket.
+ */
+public class PutKeyHandler extends Handler {
+  private String userName;
+  private String volumeName;
+  private String bucketName;
+  private String keyName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.PUT_KEY)) {
+      throw new OzoneRestClientException("Incorrect call : putKey is missing");
+    }
+
+    if (!cmd.hasOption(Shell.FILE)) {
+      throw new OzoneRestClientException("put key needs a file to put");
+    }
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.PUT_KEY);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    Path path = Paths.get(ozoneURI.getPath());
+    if (path.getNameCount() < 3) {
+      throw new OzoneRestClientException(
+          "volume/bucket/key name required in putKey");
+    }
+
+    volumeName = path.getName(0).toString();
+    bucketName = path.getName(1).toString();
+    keyName = path.getName(2).toString();
+
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume Name : %s%n", volumeName);
+      System.out.printf("Bucket Name : %s%n", bucketName);
+      System.out.printf("Key Name : %s%n", keyName);
+    }
+
+
+    String fileName = cmd.getOptionValue(Shell.FILE);
+    File dataFile = new File(fileName);
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      FileInputStream stream = new FileInputStream(dataFile);
+      String hash = DigestUtils.md5Hex(stream);
+      System.out.printf("File Hash : %s%n", hash);
+      stream.close();
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(userName);
+
+    client.putKey(volumeName, bucketName, keyName, dataFile);
+  }
+
+}
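Both PutKeyHandler above and GetKeyHandler earlier compute the verbose-mode file hash with commons-codec's DigestUtils. A hedged sketch of just that step, with the stream scoped by try-with-resources so it is always closed:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    import org.apache.commons.codec.digest.DigestUtils;

    public final class Md5Sketch {
      public static String md5Of(File file) throws IOException {
        try (FileInputStream stream = new FileInputStream(file)) {
          return DigestUtils.md5Hex(stream);  // hex-encoded MD5 of the file contents
        }
      }
    }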
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/package-info.java
new file mode 100644
index 0000000..1deb7ad
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/keys/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Ozone shell (ozShell) command handlers for key operations.
+ */
+package org.apache.hadoop.ozone.web.ozShell.keys;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/package-info.java
new file mode 100644
index 0000000..e33b6e7
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/package-info.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+/**
+ * The ozShell package acts as the command line interface to the
+ * Ozone REST client: a simple CLI to work against Ozone.
+ */
+package org.apache.hadoop.ozone.web.ozShell;
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/CreateVolumeHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/CreateVolumeHandler.java
new file mode 100644
index 0000000..c0b0bb93
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/CreateVolumeHandler.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.volume;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Executes the create volume call for the shell.
+ */
+public class CreateVolumeHandler extends Handler {
+
+  private String rootName;
+  private String userName;
+  private String volumeName;
+  private String quota;
+
+  /**
+   * Executes the Create Volume.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.CREATE_VOLUME)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : createVolume is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.CREATE_VOLUME);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    if (ozoneURI.getPath().isEmpty()) {
+      throw new OzoneRestClientException(
+          "Volume name is required to create a volume");
+    }
+
+    // we need to skip the slash in the URI path
+    // getPath returns /volumeName needs to remove the first slash.
+    volumeName = ozoneURI.getPath().substring(1);
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume name : %s%n", volumeName);
+    }
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    if (!cmd.hasOption(Shell.USER)) {
+      throw new OzoneRestClientException(
+          "User name is needed in createVolume call.");
+    }
+
+    if (cmd.hasOption(Shell.QUOTA)) {
+      quota = cmd.getOptionValue(Shell.QUOTA);
+    }
+
+    userName = cmd.getOptionValue(Shell.USER);
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    OzoneVolume vol = client.createVolume(volumeName, userName, quota);
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("%s%n",
+          JsonUtils.toJsonStringWithDefaultPrettyPrinter(vol.getJsonString()));
+    }
+  }
+}
+
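The volume handlers all derive the volume name by taking URI.getPath() and dropping the leading slash, as the comment in CreateVolumeHandler notes. A small stand-alone sketch of that step (the endpoint host and port are made up):

    import java.net.URI;

    public final class VolumeNameSketch {
      public static void main(String[] args) {
        URI ozoneURI = URI.create("http://ozone.example.com:9864/vol1");  // hypothetical endpoint
        String path = ozoneURI.getPath();       // "/vol1"
        String volumeName = path.substring(1);  // "vol1" -- skip the leading slash
        System.out.println(volumeName);
      }
    }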
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/DeleteVolumeHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/DeleteVolumeHandler.java
new file mode 100644
index 0000000..bed7db3f
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/DeleteVolumeHandler.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.volume;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Executes deleteVolume call for the shell.
+ */
+public class DeleteVolumeHandler extends Handler {
+
+  private String volumeName;
+  private String rootName;
+
+  /**
+   * Executes the delete volume call.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+
+    if (!cmd.hasOption(Shell.DELETE_VOLUME)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : deleteVolume call is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.DELETE_VOLUME);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    if (ozoneURI.getPath().isEmpty()) {
+      throw new OzoneRestClientException(
+          "Volume name is required to delete a volume");
+    }
+
+    // we need to skip the slash in the URI path
+    volumeName = ozoneURI.getPath().substring(1);
+
+    if (cmd.hasOption(Shell.VERBOSE)) {
+      System.out.printf("Volume name : %s%n", volumeName);
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+    client.deleteVolume(volumeName);
+
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/InfoVolumeHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/InfoVolumeHandler.java
new file mode 100644
index 0000000..16de3d4
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/InfoVolumeHandler.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.volume;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Executes volume Info calls.
+ */
+public class InfoVolumeHandler extends Handler {
+
+  private String rootName;
+  private String volumeName;
+  private String userName;
+
+  /**
+   * Executes volume Info.
+   *
+   * @param cmd - CommandLine
+   *
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+
+    if (!cmd.hasOption(Shell.INFO_VOLUME)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : infoVolume is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.INFO_VOLUME);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    if (ozoneURI.getPath().isEmpty()) {
+      throw new OzoneRestClientException(
+          "Volume name is required to get info of a volume");
+    }
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    }
+
+    // we need to skip the slash in the URI path
+    volumeName = ozoneURI.getPath().substring(1);
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+
+    if (rootName != null) {
+      client.setUserAuth(rootName);
+    } else {
+      client.setUserAuth(userName);
+    }
+
+    OzoneVolume vol = client.getVolume(volumeName);
+    System.out.printf("%s%n",
+        JsonUtils.toJsonStringWithDefaultPrettyPrinter(vol.getJsonString()));
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/ListVolumeHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/ListVolumeHandler.java
new file mode 100644
index 0000000..189c891
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/ListVolumeHandler.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.volume;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.web.client.OzoneVolume;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+import org.apache.hadoop.ozone.web.response.VolumeInfo;
+import org.apache.hadoop.ozone.web.utils.JsonUtils;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Executes List Volume call.
+ */
+public class ListVolumeHandler extends Handler {
+  private String rootName;
+  private String userName;
+
+  /**
+   * Executes the Client Calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+
+    if (!cmd.hasOption(Shell.LIST_VOLUME)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : listVolume is missing");
+    }
+
+    int maxKeys = 0;
+    if (cmd.hasOption(Shell.LIST_LENGTH)) {
+      String length = cmd.getOptionValue(Shell.LIST_LENGTH);
+      OzoneUtils.verifyMaxKeyLength(length);
+
+      maxKeys = Integer.parseInt(length);
+    }
+
+    String startVolume = null;
+    if (cmd.hasOption(Shell.START)) {
+      startVolume = cmd.getOptionValue(Shell.START);
+    }
+
+    String prefix = null;
+    if (cmd.hasOption(Shell.PREFIX)) {
+      prefix = cmd.getOptionValue(Shell.PREFIX);
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.LIST_VOLUME);
+    URI ozoneURI = verifyURI(ozoneURIString);
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    }
+
+    if (cmd.hasOption(Shell.USER)) {
+      userName = cmd.getOptionValue(Shell.USER);
+    } else {
+      userName = System.getProperty("user.name");
+    }
+
+    client.setEndPointURI(ozoneURI);
+    if (rootName != null) {
+      client.setUserAuth(rootName);
+    } else {
+      client.setUserAuth(userName);
+    }
+
+    List<OzoneVolume> volumes = client.listVolumes(userName, prefix, maxKeys,
+        startVolume);
+    if (volumes != null) {
+      if (cmd.hasOption(Shell.VERBOSE)) {
+        System.out.printf("Found : %d volumes for user : %s %n", volumes.size(),
+            userName);
+      }
+
+      List<VolumeInfo> jsonData = volumes.stream()
+          .map(OzoneVolume::getVolumeInfo).collect(Collectors.toList());
+      System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(
+          JsonUtils.toJsonString(jsonData)));
+    }
+  }
+}
+
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/UpdateVolumeHandler.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/UpdateVolumeHandler.java
new file mode 100644
index 0000000..164fe34
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/UpdateVolumeHandler.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web.ozShell.volume;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.hadoop.ozone.web.client.OzoneRestClientException;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.Handler;
+import org.apache.hadoop.ozone.web.ozShell.Shell;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Executes update volume calls.
+ */
+public class UpdateVolumeHandler extends Handler {
+  private String rootName;
+  private String ownerName;
+  private String volumeName;
+  private String quota;
+
+  /**
+   * Executes update volume calls.
+   *
+   * @param cmd - CommandLine
+   * @throws IOException
+   * @throws OzoneException
+   * @throws URISyntaxException
+   */
+  @Override
+  protected void execute(CommandLine cmd)
+      throws IOException, OzoneException, URISyntaxException {
+    if (!cmd.hasOption(Shell.UPDATE_VOLUME)) {
+      throw new OzoneRestClientException(
+          "Incorrect call : updateVolume is missing");
+    }
+
+    String ozoneURIString = cmd.getOptionValue(Shell.UPDATE_VOLUME);
+    URI ozoneURI = verifyURI(ozoneURIString);
+    if (ozoneURI.getPath().isEmpty()) {
+      throw new OzoneRestClientException(
+          "Volume name is required to update a volume");
+    }
+
+    // we need to skip the slash in the URI path
+    volumeName = ozoneURI.getPath().substring(1);
+
+    if (cmd.hasOption(Shell.RUNAS)) {
+      rootName = "hdfs";
+    } else {
+      rootName = System.getProperty("user.name");
+    }
+
+    if (cmd.hasOption(Shell.QUOTA)) {
+      quota = cmd.getOptionValue(Shell.QUOTA);
+    }
+
+    if (cmd.hasOption(Shell.USER)) {
+      ownerName = cmd.getOptionValue(Shell.USER);
+    }
+
+    client.setEndPointURI(ozoneURI);
+    client.setUserAuth(rootName);
+
+    if (quota != null && !quota.isEmpty()) {
+      client.setVolumeQuota(volumeName, quota);
+    }
+
+    if (ownerName != null && !ownerName.isEmpty()) {
+      client.setVolumeOwner(volumeName, ownerName);
+    }
+
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/package-info.java
new file mode 100644
index 0000000..fc19274
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/ozShell/volume/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Ozone shell command handlers that implement the volume related commands.
+ */
+package org.apache.hadoop.ozone.web.ozShell.volume;
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/package-info.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/package-info.java
new file mode 100644
index 0000000..1a7275c
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/web/package-info.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.web;
+
+/**
+ * This package contains generic classes for the internal HTTP server
+ * and REST interfaces.
+ */
diff --git a/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneCommandShell.md b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneCommandShell.md
new file mode 100644
index 0000000..fc63742
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneCommandShell.md
@@ -0,0 +1,150 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Ozone Command Shell
+===================
+
+The Ozone command shell provides a command-line interface for working with
+ozone. Please note that this document assumes that the cluster is deployed
+with simple authentication.
+
+The Ozone commands take the following format.
+
+* `ozone oz -command http://hostname:port/volume/bucket/key -user
+<name> -root`
+
+The *port* specified in command should match the port mentioned in the config
+property `hdds.rest.http-address`. This property can be set in `ozone-site.xml`.
+The default value for the port is `9880` and is used in below commands.
+
+The *-root* option is a command-line shortcut that allows *ozone oz*
+commands to be run as the user that started the cluster. This is useful when
+you want the commands to be run as an admin user. The only reason for this
+option is that it makes the life of a developer a little easier.
+
+Ozone Volume Commands
+--------------------
+
+The volume commands allow users to create, delete and list the volumes in the
+ozone cluster.
+
+### Create Volume
+
+Volumes can be created only by Admins. Here is an example of creating a volume.
+
+* `ozone oz -createVolume http://localhost:9880/hive -user bilbo -quota
+100TB -root`
+
+The above command creates a volume called `hive` owned by user `bilbo`. The
+`-root` option allows the command to be executed as user `hdfs` which is an
+admin in the cluster.
+
+### Update Volume
+
+Updates information like ownership and quota on an existing volume.
+
+* `ozone oz  -updateVolume  http://localhost:9880/hive -quota 500TB -root`
+
+The above command changes the volume quota of hive from 100TB to 500TB.
+
+### Delete Volume
+Deletes a Volume if it is empty.
+
+* `ozone oz -deleteVolume http://localhost:9880/hive -root`
+
+
+### Info Volume
+The info volume command allows the owner or the administrator of the cluster to read metadata about a specific volume.
+
+* `ozone oz -infoVolume http://localhost:9880/hive -root`
+
+### List Volumes
+
+The list volume command can be used by an administrator to list the volumes of any user. It can also be used by a user to list their own volumes.
+
+* `ozone oz -listVolume http://localhost:9880/ -user bilbo -root`
+
+The above command lists all volumes owned by user bilbo.
+
+Ozone Bucket Commands
+--------------------
+
+Bucket commands follow a similar pattern to volume commands. However, bucket commands are designed to be run by the owner of the volume.
+The following examples assume that these commands are run by the owner of the volume or bucket.
+
+
+### Create Bucket
+
+The create bucket call allows the owner of a volume to create a bucket.
+
+* `ozone oz -createBucket http://localhost:9880/hive/january`
+
+This call creates a bucket called `january` in the volume called `hive`. If
+the volume does not exist, then this call will fail.
+
+
+### Update Bucket
+Updates bucket meta-data, like ACLs.
+
+* `ozone oz -updateBucket http://localhost:9880/hive/january  -addAcl
+user:spark:rw`
+
+### Delete Bucket
+Deletes a bucket if it is empty.
+
+* `ozone oz -deleteBucket http://localhost:9880/hive/january`
+
+### Info Bucket
+Returns information about a given bucket.
+
+* `ozone oz -infoBucket http://localhost:9880/hive/january`
+
+### List Buckets
+List buckets on a given volume.
+
+* `ozone oz -listBucket http://localhost:9880/hive`
+
+Ozone Key Commands
+------------------
+
+Ozone key commands allow users to put, get and delete keys in ozone buckets.
+
+### Put Key
+Creates or overwrites a key in the ozone store; -file points to the file you
+want to upload.
+
+* `ozone oz -putKey  http://localhost:9880/hive/january/processed.orc  -file
+processed.orc`
+
+### Get Key
+Downloads a file from the ozone bucket.
+
+* `ozone oz -getKey  http://localhost:9880/hive/january/processed.orc  -file
+  processed.orc.copy`
+
+### Delete Key
+Deletes a key from the ozone store.
+
+* `ozone oz -deleteKey http://localhost:9880/hive/january/processed.orc`
+
+### Info Key
+Reads key metadata from the ozone store.
+
+* `ozone oz -infoKey http://localhost:9880/hive/january/processed.orc`
+
+### List Keys
+List all keys in an ozone bucket.
+
+* `ozone oz -listKey  http://localhost:9880/hive/january`
diff --git a/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneGettingStarted.md.vm b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneGettingStarted.md.vm
new file mode 100644
index 0000000..9e96098
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneGettingStarted.md.vm
@@ -0,0 +1,347 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+Ozone - Object store for Hadoop
+==============================
+
+Introduction
+------------
+Ozone is an object store for Hadoop. It is a redundant, distributed object
+store built by leveraging primitives present in HDFS. Ozone supports a REST
+API for accessing the store.
+
+Getting Started
+---------------
+Ozone is a work in progress and currently lives in the hadoop source tree.
+The subprojects (ozone/hdds) are part of the hadoop source tree but are not
+compiled by default and are not part of the official releases. To use it,
+you have to build a package yourself and deploy a cluster.
+
+### Building Ozone
+
+To build Ozone, please check out the hadoop sources from github. Then
+check out the trunk branch and build it.
+
+`mvn clean package -DskipTests=true -Dmaven.javadoc.skip=true -Pdist -Phdds -Dtar -DskipShade`
+
+The skipShade option just makes compilation faster and is not strictly required.
+
+This will give you a tarball in your distribution directory. This is the
+tarball that can be used for deploying your hadoop cluster. Here is an
+example of the tarball that will be generated.
+
+* `~/apache/hadoop/hadoop-dist/target/${project.version}.tar.gz`
+
+At this point we have the option to set up a physical cluster or to run ozone
+via docker.
+
+Running Ozone via Docker
+------------------------
+
+This assumes that you have a working docker setup on the machine. Please run
+the following commands to see ozone in action.
+
+ Go to the directory where the docker compose files exist.
+
+
+ - `cd hadoop-dist/target/compose/ozone`
+
+Tell docker to start ozone; this will start a KSM, an SCM and a single datanode
+in the background.
+
+
+ - `docker-compose up -d`
+
+Now let us run some workload against ozone; to do that we will run freon.
+
+The following command logs into the datanode and runs bash.
+
+ - `docker-compose exec datanode bash`
+
+Now you can run the `ozone` command shell or freon, the ozone load generator.
+
+This is the command to run freon.
+
+ - `ozone freon -mode offline -validateWrites -numOfVolumes 1 -numOfBuckets 10 -numOfKeys 100`
+
+You can check out the KSM UI to see the request information.
+
+ - `http://localhost:9874/`
+
+If you need more datanodes you can scale up:
+
+ - `docker-compose scale datanode=3`
+
+Running Ozone using a real cluster
+----------------------------------
+
+Please proceed to set up a hadoop cluster by creating the hdfs-site.xml and
+other configuration files that are needed for your cluster.
+
+
+### Ozone Configuration
+
+Ozone relies on its own configuration file called `ozone-site.xml`. This is
+just for convenience and ease of management; you can add these settings
+to `hdfs-site.xml` if you don't want to keep ozone settings separate.
+This document refers to `ozone-site.xml` so that ozone settings are in one
+place and not mingled with HDFS settings.
+
+ * _*ozone.enabled*_  This is the most important setting for ozone.
+ Currently, Ozone is an opt-in subsystem of HDFS. By default, Ozone is
+ disabled. Setting this flag to `true` enables ozone in the HDFS cluster.
+ Here is an example,
+
+```
+    <property>
+       <name>ozone.enabled</name>
+       <value>True</value>
+    </property>
+```
+ *  _*ozone.metadata.dirs*_ Ozone is designed with modern hardware
+ in mind. It tries to use SSDs effectively, so users can specify where the
+ metadata must reside. Usually you pick your fastest disk (SSDs if
+ you have them on your nodes). KSM, SCM and the datanodes will write the
+ metadata to these disks. This is a required setting; if it is missing, Ozone
+ will fail to come up. Here is an example,
+
+```
+   <property>
+      <name>ozone.metadata.dirs</name>
+      <value>/data/disk1/meta</value>
+   </property>
+```
+
+* _*ozone.scm.names*_ Ozone is built on top of the container framework. The
+ storage container manager (SCM) is a distributed block service which is used
+ by ozone and other storage services.
+ This property allows datanodes to discover where SCM is, so that
+ datanodes can send heartbeats to SCM. SCM is designed to be highly available
+ and datanodes assume there are multiple instances of SCM which form a highly
+ available ring. The HA feature of SCM is a work in progress, so for now we
+ configure ozone.scm.names to be a single machine. Here is an example,
+
+```
+    <property>
+      <name>ozone.scm.names</name>
+      <value>scm.hadoop.apache.org</value>
+    </property>
+```
+
+* _*ozone.scm.datanode.id*_ Each datanode that speaks to SCM generates an ID,
+just like in HDFS. This is an optional setting. Please note that
+this path will be created by the datanode if it doesn't exist already. Here is
+ an example,
+
+```
+   <property>
+      <name>ozone.scm.datanode.id</name>
+      <value>/data/disk1/scm/meta/node/datanode.id</value>
+   </property>
+```
+
+* _*ozone.scm.block.client.address*_ Storage Container Manager (SCM) offers a
+ set of services that can be used to build a distributed storage system. One
+ of the services offered is the block service, which KSM and HDFS use. This
+ property describes where KSM can discover SCM's block service
+ endpoint. There are corresponding port settings as well, but assuming that we
+ are using the default ports, the server address is the only required field.
+ Here is an example,
+
+```
+    <property>
+      <name>ozone.scm.block.client.address</name>
+      <value>scm.hadoop.apache.org</value>
+    </property>
+```
+
+* _*ozone.ksm.address*_ KSM server address. This is used by the Ozone handler
+and the Ozone File System.
+
+```
+    <property>
+       <name>ozone.ksm.address</name>
+       <value>ksm.hadoop.apache.org</value>
+    </property>
+```
+
+* _*dfs.datanode.plugins*_ Datanode service plugins: the container manager part
+ of ozone runs inside the datanode as a service plugin. To activate ozone
+ you should define the service plugin implementation class. **Important:**
+ it should be added to **hdfs-site.xml**, as the plugin has to be activated
+ as part of the normal HDFS Datanode bootstrap.
+
+```
+    <property>
+       <name>dfs.datanode.plugins</name>
+       <value>org.apache.hadoop.ozone.HddsDatanodeService</value>
+    </property>
+```
+
+Here is a quick summary of settings needed by Ozone.
+
+| Setting                        | Value                        | Comment |
+|--------------------------------|------------------------------|------------------------------------------------------------------|
+| ozone.enabled                  | True                         | This enables SCM and  containers in HDFS cluster.                |
+| ozone.metadata.dirs            | file path                    | The metadata will be stored here.                                |
+| ozone.scm.names                | SCM server name              | Hostname:port or IP:port address of SCM.                         |
+| ozone.scm.block.client.address | SCM server name and port     | Used by services like KSM                                        |
+| ozone.scm.client.address       | SCM server name and port     | Used by client side                                              |
+| ozone.scm.datanode.address     | SCM server name and port     | Used by datanode to talk to SCM                                  |
+| ozone.ksm.address              | KSM server name              | Used by Ozone handler and Ozone file system.                     |
+
+ Here is a working example of `ozone-site.xml`.
+
+```
+    <?xml version="1.0" encoding="UTF-8"?>
+    <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+    <configuration>
+      <property>
+          <name>ozone.enabled</name>
+          <value>True</value>
+        </property>
+
+        <property>
+          <name>ozone.metadata.dirs</name>
+          <value>/data/disk1/ozone/meta</value>
+        </property>
+
+        <property>
+          <name>ozone.scm.names</name>
+          <value>127.0.0.1</value>
+        </property>
+
+        <property>
+           <name>ozone.scm.client.address</name>
+           <value>127.0.0.1:9860</value>
+        </property>
+
+         <property>
+           <name>ozone.scm.block.client.address</name>
+           <value>127.0.0.1:9863</value>
+         </property>
+
+         <property>
+           <name>ozone.scm.datanode.address</name>
+           <value>127.0.0.1:9861</value>
+         </property>
+
+         <property>
+           <name>ozone.ksm.address</name>
+           <value>127.0.0.1:9874</value>
+         </property>
+    </configuration>
+```
+
+And don't forget to enable the datanode component by adding the
+following configuration to hdfs-site.xml:
+
+```
+    <property>
+       <name>dfs.datanode.plugins</name>
+       <value>org.apache.hadoop.ozone.HddsDatanodeService</value>
+    </property>
+```
+
+### Starting Ozone
+
+Ozone is designed to run concurrently with HDFS. The simplest way to [start
+HDFS](../hadoop-common/ClusterSetup.html) is to run
+`$HADOOP/sbin/start-dfs.sh`. Once HDFS
+is running, please verify it is fully functional by running some commands like
+
+   - *./hdfs dfs -mkdir /usr*
+   - *./hdfs dfs -ls /*
+
+ Once you are sure that HDFS is running, start Ozone. To start ozone, you
+ need to start SCM and KSM. Currently we assume that both KSM and SCM
+ are running on the same node; this will change in the future.
+
+ The first time you bring up Ozone, SCM must be initialized.
+
+   - `./ozone scm -init`
+
+ Start SCM.
+
+   - `./ozone --daemon start scm`
+
+ Once SCM gets started, KSM must be initialized.
+
+   - `./ozone ksm -createObjectStore`
+
+ Start KSM.
+
+   - `./ozone --daemon start ksm`
+
+If you would like to start HDFS and Ozone together, you can do that by running
+ a single command.
+ - `$HADOOP/sbin/start-ozone.sh`
+
+ This command will start HDFS and then start the ozone components.
+
+ Once you have ozone running you can use these ozone [shell](./OzoneCommandShell.html)
+ commands to  create a  volume, bucket and keys.
+
+### Diagnosing issues
+
+Ozone tries not to pollute the existing HDFS configuration and logging
+streams, so ozone logs are by default written to a separate file
+called `ozone.log`. This is controlled by the settings in the
+`log4j.properties` file in the hadoop configuration directory.
+
+Here are the log4j properties that are added by ozone.
+
+
+```
+   #
+   # Add a logger for ozone that is separate from the Datanode.
+   #
+   #log4j.debug=true
+   log4j.logger.org.apache.hadoop.ozone=DEBUG,OZONE,FILE
+
+   # Do not log into datanode logs. Remove this line to have single log.
+   log4j.additivity.org.apache.hadoop.ozone=false
+
+   # For development purposes, log both to console and log file.
+   log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
+   log4j.appender.OZONE.Threshold=info
+   log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
+   log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+    %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
+
+   # Real ozone logger that writes to ozone.log
+   log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
+   log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
+   log4j.appender.FILE.Threshold=debug
+   log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
+   log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+     (%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
+      %m%n
+```
+
+If you would like to have a single datanode log instead of ozone messages
+being written to ozone.log, please remove this line or set it to true.
+
+ ` log4j.additivity.org.apache.hadoop.ozone=false`
+
+On the SCM/KSM side, you will be able to see
+
+  - `hadoop-hdfs-ksm-hostname.log`
+  - `hadoop-hdfs-scm-hostname.log`
+
+Please file any issues you see under one of the related JIRAs:
+
+ - [Object store in HDFS: HDFS-7240](https://issues.apache.org/jira/browse/HDFS-7240)
+ - [Ozone File System: HDFS-13074](https://issues.apache.org/jira/browse/HDFS-13074)
+ - [Building HDFS on top of new storage layer (HDDS): HDFS-10419](https://issues.apache.org/jira/browse/HDFS-10419)
+
diff --git a/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneMetrics.md b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneMetrics.md
new file mode 100644
index 0000000..f5eccf6
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneMetrics.md
@@ -0,0 +1,166 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+
+
+HDFS Ozone Metrics
+===============
+
+<!-- MACRO{toc|fromDepth=0|toDepth=3} -->
+
+Overview
+--------
+
+This page describes the container metrics that are used in HDFS Ozone.
+
+### Storage Container Metrics
+
+The metrics for various storage container operations in HDFS Ozone.
+
+The storage container service is optional and can be enabled by setting
+'ozone.enabled' to true.
+These metrics are only available when ozone is enabled.
+
+Storage Container Metrics maintains a set of generic metrics for all
+container RPC calls that can be made to a datanode/container.
+
+Along with the total number of RPC calls, containers maintain a set of metrics
+for each RPC call. Following is the set of counters maintained for each RPC
+operation.
+
+*Total number of operations* - We maintain an array which counts how
+many times a specific operation has been performed.
+E.g. `NumCreateContainer` tells us how many times create container has been
+invoked on this datanode.
+
+*Total number of pending operations* - This is an array which counts how
+many requests for a specific operation are waiting to be processed, from the
+client point of view.
+E.g. `NumPendingCreateContainer` tells us how many create container requests
+are waiting to be processed.
+
+*Average latency of each pending operation in nanoseconds* - The average latency
+of the operation from the client point of view.
+E.g. `CreateContainerLatencyAvgTime` - This tells us the average latency of
+Create Container from the client point of view.
+
+*Number of bytes involved in a specific command* - This is an array that is
+maintained for all operations, but makes sense only for read and write
+operations.
+
+While it is possible to read the bytes in update container, it really makes
+no sense, since no data stream is involved. Users are advised to use this
+metric only when it makes sense. E.g. `BytesReadChunk` tells us how
+many bytes have been read from this datanode using the Read Chunk operation.
+
+*Average Latency of each operation* - The average latency of the operation.
+E.g. `LatencyCreateContainerAvgTime` - This tells us the average latency of
+Create Container.
+
+*Quantiles for each of these operations* - The 50/75/90/95/99th percentile
+of these operations. Eg. `CreateContainerNanos60s50thPercentileLatency` --
+gives latency of the create container operations at the 50th percentile latency
+(1 minute granularity). We report 50th, 75th, 90th, 95th and 99th percentile
+for all RPCs.
+
+So this leads to the containers reporting these counters for each of these
+RPC operations.
+
+| Name | Description |
+|:---- |:---- |
+| `NumOps` | Total number of container operations |
+| `CreateContainer` | Create container operation |
+| `ReadContainer` | Read container operation |
+| `UpdateContainer` | Update container operations |
+| `DeleteContainer` | Delete container operations |
+| `ListContainer` | List container operations |
+| `PutKey` | Put key operations |
+| `GetKey` | Get key operations |
+| `DeleteKey` | Delete key operations |
+| `ListKey` | List key operations |
+| `ReadChunk` | Read chunk operations |
+| `DeleteChunk` | Delete chunk operations |
+| `WriteChunk` | Write chunk operations|
+| `ListChunk` | List chunk operations |
+| `CompactChunk` | Compact chunk operations |
+| `PutSmallFile` | Put small file operations |
+| `GetSmallFile` | Get small file operations |
+| `CloseContainer` | Close container operations |
+
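+These counters are published through the standard Hadoop metrics system, so
+they can also be read from a daemon's `/jmx` servlet like any other Hadoop
+metric. The following is a minimal sketch that simply dumps the matching JSON
+lines; the datanode HTTP port (9864) and the substring filter are assumptions
+and may need to be adjusted for your deployment.
+
+```
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+
+/** Dumps container-related metrics from a Hadoop daemon's /jmx servlet. */
+public class ContainerMetricsDump {
+  public static void main(String[] args) throws Exception {
+    // Assumed datanode HTTP address; adjust host and port for your cluster.
+    String jmxUrl = args.length > 0 ? args[0] : "http://localhost:9864/jmx";
+    HttpURLConnection conn =
+        (HttpURLConnection) new URL(jmxUrl).openConnection();
+    conn.setRequestMethod("GET");
+    try (BufferedReader reader = new BufferedReader(
+        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
+      String line;
+      while ((line = reader.readLine()) != null) {
+        // Keep only lines that mention the counters described above,
+        // e.g. NumCreateContainer or WriteChunk.
+        if (line.contains("Container") || line.contains("Chunk")) {
+          System.out.println(line.trim());
+        }
+      }
+    } finally {
+      conn.disconnect();
+    }
+  }
+}
+```
+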
+### Storage Container Manager Metrics
+
+The metrics for containers that are managed by the Storage Container Manager.
+
+Storage Container Manager (SCM) is a master service which keeps track of
+replicas of storage containers. It also manages all data nodes and their
+states, dealing with container reports and dispatching commands for execution.
+
+Following are the counters for containers:
+
+| Name | Description |
+|:---- |:---- |
+| `LastContainerReportSize` | Total size in bytes of all containers in latest container report that SCM received from datanode |
+| `LastContainerReportUsed` | Total number of bytes used by all containers in latest container report that SCM received from datanode |
+| `LastContainerReportKeyCount` | Total number of keys in all containers in latest container report that SCM received from datanode |
+| `LastContainerReportReadBytes` | Total number of bytes that have been read from all containers in latest container report that SCM received from datanode |
+| `LastContainerReportWriteBytes` | Total number of bytes that have been written into all containers in latest container report that SCM received from datanode |
+| `LastContainerReportReadCount` | Total number of times containers have been read from in latest container report that SCM received from datanode |
+| `LastContainerReportWriteCount` | Total number of times containers have been written to in latest container report that SCM received from datanode |
+| `ContainerReportSize` | Total size in bytes of all containers over whole cluster |
+| `ContainerReportUsed` | Total number of bytes used by all containers over whole cluster |
+| `ContainerReportKeyCount` | Total number of keys in all containers over whole cluster |
+| `ContainerReportReadBytes` | Total number of bytes that have been read from all containers over whole cluster |
+| `ContainerReportWriteBytes` | Total number of bytes that have been written into all containers over whole cluster |
+| `ContainerReportReadCount` | Total number of times containers have been read from over whole cluster |
+| `ContainerReportWriteCount` | Total number of times containers have been written to over whole cluster |
+
+### Key Space Metrics
+
+The metrics for various key space manager operations in HDFS Ozone.
+
+The Key Space Manager (KSM) is a service that is similar to the Namenode in HDFS.
+In the current design of KSM, it maintains the metadata of all volumes, buckets and keys.
+These metrics are only available when ozone is enabled.
+
+Following is the set of counters maintained for each key space operation.
+
+*Total number of operations* - We maintain an array which counts how
+many times a specific operation has been performed.
+E.g. `NumVolumeCreate` tells us how many times create volume has been
+invoked in KSM.
+
+*Total number of failed operations* - This counts how many times a specific
+operation has failed.
+E.g. `NumVolumeCreateFails` tells us how many times create volume has
+failed in KSM.
+
+Following are the counters for each of the key space operations.
+
+| Name | Description |
+|:---- |:---- |
+| `VolumeCreate` | Create volume operation |
+| `VolumeUpdates` | Update volume property operation |
+| `VolumeInfos` | Get volume information operation |
+| `VolumeCheckAccesses` | Check volume access operation |
+| `VolumeDeletes` | Delete volume operation |
+| `VolumeLists` | List volume operation |
+| `BucketCreates` | Create bucket operation |
+| `BucketInfos` | Get bucket information operation |
+| `BucketUpdates` | Update bucket property operation |
+| `BucketDeletes` | Delete bucket operation |
+| `BucketLists` | List bucket operation |
+| `KeyAllocate` | Allocate key operation |
+| `KeyLookup` | Look up key operation |
+| `KeyDeletes` | Delete key operation |
+| `KeyLists` | List key operation |
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneOverview.md b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneOverview.md
new file mode 100644
index 0000000..41d7dbd
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneOverview.md
@@ -0,0 +1,88 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+Ozone Overview
+==============
+
+
+Ozone is an Object store for Apache Hadoop. It aims to scale to billions of
+keys. The following is a high-level overview of the core components of Ozone.
+
+![Ozone Architecture Overview](images/ozoneoverview.png)
+
+The main elements of Ozone are:
+
+### Clients
+Ozone ships with a set of ready-made clients. They are the Ozone CLI and Freon.
+
+    * [Ozone CLI](./OzoneCommandShell.html) is the command line interface, similar to the 'hdfs' command.
+
+    * Freon is a load generation tool for Ozone.
+
+### REST Handler
+Ozone provides both an RPC (Remote Procedure Call) as well as a REST
+(Representational State Transfer) style interface. This allows clients to be
+written in many languages quickly. Ozone strives to maintain a similar
+interface between REST and RPC. The REST handler offers the REST protocol
+services of Ozone.
+
+For most purposes, a client can make a one-line change to switch from REST to
+RPC or vice versa.
+
+### Ozone File System
+Ozone file system (TODO: Add documentation) is a Hadoop compatible file system.
+This is the most important user-visible component of ozone.
+It allows Hadoop services and applications like Hive/Spark to run against
+Ozone without any change.
+
+### Ozone Client
+This is like DFSClient in HDFS. It acts as the standard client to talk to
+Ozone. All other components that we have discussed so far rely on the Ozone
+client (TODO: Add Ozone client documentation).
+
+### Key Space Manager
+Key Space Manager (KSM) takes care of Ozone's namespace.
+All ozone entities like volumes, buckets and keys are managed by KSM
+(TODO: Add KSM documentation). In short, KSM is the metadata manager for Ozone.
+KSM talks to the block manager (SCM) to get blocks and passes them on to the
+Ozone client. The Ozone client writes data to these blocks.
+KSM will eventually be replicated via Apache Ratis for High Availability.
+
+### Storage Container Manager
+Storage Container Manager (SCM) is the block and cluster manager for Ozone.
+SCM, along with the data nodes, offers a service called 'containers'.
+A container is a group of unrelated blocks that are managed together
+as a single entity.
+
+SCM offers the following abstractions.
+
+![SCM Abstractions](images/scmservices.png)
+#### Blocks
+Blocks are like blocks in HDFS. They are a replicated store of data.
+
+#### Containers
+A collection of blocks replicated and managed together.
+
+#### Pipelines
+SCM allows each container to choose its method of replication.
+For example, a container might decide that it needs only one copy of a block
+and might choose a stand-alone pipeline. Another container might want to have
+a very high level of reliability and pick a RATIS-based pipeline. In other
+words, SCM allows different kinds of replication strategies to co-exist.
+
+#### Pools
+A group of data nodes is called a pool. For scaling purposes,
+we define a pool as a set of machines. This makes management of datanodes
+easier.
+
+#### Nodes
+The data node where data is stored.
diff --git a/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneRest.md b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneRest.md
new file mode 100644
index 0000000..13fe00d
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/site/markdown/OzoneRest.md
@@ -0,0 +1,549 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+Ozone REST APIs
+===================
+
+<!-- MACRO{toc|fromDepth=0|toDepth=1} -->
+
+Overview
+--------
+
+The Ozone REST APIs allow users to access ozone via the REST protocol.
+
+Authentication and Authorization
+--------------------
+
+For the time being, the default authentication mode of the REST API is an
+insecure access mode, which is *Simple* mode. Under this mode, the ozone server
+trusts the user name specified by the client and does not perform any
+authentication.
+
+The user name can be specified in an HTTP header by
+
+* `x-ozone-user: {USER_NAME}`
+
+For example, if you add the header *x-ozone-user: bilbo* to the HTTP request,
+then the operation will be executed as user *bilbo*.
+In *Simple* mode, there is no real authorization either. A client can be
+authorized to obtain administrator privilege by using the HTTP header
+
+* `Authorization: {AUTH_METHOD} {SIGNATURE}`
+
+For example, if you set the header *Authorization: OZONE root* in the HTTP
+request, then ozone will authorize the client with administrator privilege.
+
+Common REST Headers
+--------------------
+
+The following HTTP headers must be set for each REST call.
+
+| Property | Description |
+|:---- |:----
+| Authorization | The authorization field determines which authentication method is used by ozone. Currently only *simple* mode is supported; the corresponding value is *OZONE*. Optionally a user name can be set as *OZONE {USER_NAME}* to authorize as a particular user. |
+| Date | Standard HTTP header that represents dates. The format is - day of the week, month, day, year and time (military time format) in GMT. Any other time zone will be rejected by ozone server. Eg. *Date : Mon, Apr 4, 2016 06:22:00 GMT*. This field is required. |
+| x-ozone-version | A required HTTP header that indicates which version of the API this call is using. E.g. *x-ozone-version: v1*. Currently ozone only publishes the v1 version of the API. |
+
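+For programmatic access, the same headers can be attached with any plain HTTP
+client. The following is a minimal sketch using `java.net.HttpURLConnection`;
+the endpoint (the default REST port 9880 on localhost), the user name *bilbo*
+and the use of *Simple* authentication are assumptions taken from the samples
+in this document.
+
+    import java.net.HttpURLConnection;
+    import java.net.URL;
+    import java.time.ZoneOffset;
+    import java.time.ZonedDateTime;
+    import java.time.format.DateTimeFormatter;
+
+    /** Issues an Ozone REST call carrying the common headers listed above. */
+    public class OzoneRestHeadersExample {
+      public static void main(String[] args) throws Exception {
+        // List the volumes owned by "bilbo" (see the Volume APIs section below).
+        URL url = new URL("http://localhost:9880/");
+        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod("GET");
+        conn.setRequestProperty("x-ozone-version", "v1");
+        conn.setRequestProperty("x-ozone-user", "bilbo");
+        // The Date header must be expressed in GMT.
+        conn.setRequestProperty("Date", DateTimeFormatter.RFC_1123_DATE_TIME
+            .format(ZonedDateTime.now(ZoneOffset.UTC)));
+        conn.setRequestProperty("Authorization", "OZONE");
+        System.out.println("HTTP status: " + conn.getResponseCode());
+        conn.disconnect();
+      }
+    }
+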
+Common Reply Headers
+--------------------
+
+The common reply headers are part of all Ozone server replies.
+
+| Property | Description |
+|:---- |:----
+| Date | This is the HTTP date header and it is set to server’s local time expressed in GMT. |
+| x-ozone-request-id | This is a UUID string that represents a unique request ID. This ID is used to track the request through the ozone system and is useful for debugging purposes. |
+| x-ozone-server-name | Fully qualified domain name of the server which handled the request. |
+
+Volume APIs
+--------------------
+
+### Create a Volume
+
+This API allows admins to create a new storage volume.
+
+Schema:
+
+- `POST /{volume}?quota=<VOLUME_QUOTA>`
+
+Query Parameter:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| quota | long<BYTES \| MB \| GB \| TB> | Optional. Quota size in BYTEs, MBs, GBs or TBs |
+
+Sample HTTP POST request:
+
+    curl -i -X POST -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE root" "http://localhost:9880/volume-to-create"
+
+This request creates a volume as user *bilbo*; the authorization field is set to *OZONE root* because this call requires administration privilege. The client receives a response with zero content length.
+
+    HTTP/1.1 201 Created
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 2173deb5-bbb7-4f0a-8236-f354784e3bae
+    Date: Tue, 27 Jun 2017 07:42:04 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+### Update Volume
+
+This API allows administrators to update volume info such as ownership and quota. This API requires administration privilege.
+
+Schema:
+
+- `PUT /{volume}?quota=<VOLUME_QUOTA>`
+
+Query Parameter:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| quota | long<BYTES \| MB \| GB \| TB>  \| remove | Optional. Quota size in BYTEs, MBs, GBs or TBs. Or use string value *remove* to remove an existing quota for a volume. |
+
+Sample HTTP PUT request:
+
+    curl -X PUT -H "Authorization:OZONE root" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "x-ozone-version: v1" -H "x-ozone-user: john"  http://localhost:9880/volume-to-update
+
+This request modifies the owner of */volume-to-update* to *john*.
+
+### Delete Volume
+
+This API allows users to delete a volume they own, provided the volume is empty. Administrators can delete volumes owned by any user.
+
+Schema:
+
+- `DELETE /{volume}`
+
+Sample HTTP DELETE request:
+
+    curl -i -X DELETE -H "Authorization:OZONE root" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "x-ozone-version: v1" -H "x-ozone-user: bilbo"  http://localhost:9880/volume-to-delete
+
+This request deletes an empty volume */volume-to-delete*. The client receives a zero-length content response.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 6af14c64-e3a9-40fe-9634-df60b7cbbc6a
+    Date: Tue, 27 Jun 2017 08:49:52 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+### Info Volume
+
+This API allows users to read the info of a volume they own. Administrators can read the info of volumes owned by any user.
+
+Schema:
+
+- `GET /{volume}?info=volume`
+
+Query Parameter:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| info | "volume" | Required and enforced with this value. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo?info=volume"
+
+This request gets the info of volume */volume-of-bilbo*; the client receives a response with a JSON object of volume info.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: a2224806-beaf-42dd-a68e-533cd7508f74
+    Date: Tue, 27 Jun 2017 07:55:35 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 171
+    Connection: keep-alive
+
+    {
+      "owner" : { "name" : "bilbo" },
+      "quota" : { "unit" : "TB", "size" : 1048576 },
+      "volumeName" : "volume-of-bilbo",
+      "createdOn" : "Tue, 27 Jun 2017 07:42:04 GMT",
+      "createdBy" : "root"
+    }
+
+### List Volumes
+
+This API allows users to list all volumes they own. Administrators can list all volumes owned by any user.
+
+Schema:
+
+- `GET /?prefix=<PREFIX>&max-keys=<MAX_RESULT_SIZE>&prev-key=<PREVIOUS_VOLUME_KEY>`
+
+Query Parameter:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| prefix | string | Optional. Only volumes with this prefix are included in the result. |
+| max-keys | int | Optional. Maximum number of volumes included in the result. Default is 1024 if not specified. |
+| prev-key | string | Optional. Volume name from which listing should start; this key is excluded from the result. It must be a valid volume name. |
+| root-scan | bool | Optional. List all volumes in the cluster if this is set to true. Default false. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/?max-keys=100&prefix=Jan"
+
+This request gets all volumes owned by *bilbo* whose names contain the prefix *Jan*; the result contains at most *100* entries. The client receives a list of JSON objects, each of which describes the info of a volume.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 7fa0dce1-a8bd-4387-bc3c-1dac4b710bb1
+    Date: Tue, 27 Jun 2017 08:07:04 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 602
+    Connection: keep-alive
+
+    {
+      "volumes" : [
+        {
+          "owner" : { "name" : "bilbo"},
+          "quota" : { "unit" : "TB", "size" : 2 },
+          "volumeName" : "Jan-vol1",
+          "createdOn" : "Tue, 27 Jun 2017 07:42:04 GMT",
+          "createdBy" : root
+      },
+      ...
+      ]
+    }
+
+Bucket APIs
+--------------------
+
+### Create Bucket
+
+This API allows a user to create a bucket in a volume.
+
+Schema:
+
+- `POST /{volume}/{bucket}`
+
+Additional HTTP Headers:
+
+| HTTP Header | Value | Description |
+|:---- |:---- |:----
+| x-ozone-acl | ozone ACLs | Optional. Ozone acls. |
+| x-ozone-storage-class | <DEFAULT \| ARCHIVE \| DISK \| RAM_DISK \| SSD > | Optional. Storage type for a volume. |
+| x-ozone-bucket-versioning | enabled/disabled | Optional. Whether or not to enable bucket versioning. |
+
+Sample HTTP POST request:
+
+    curl -i -X POST -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" http://localhost:9880/volume-of-bilbo/bucket-0
+
+This request creates a bucket *bucket-0* under volume *volume-of-bilbo*.
+
+    HTTP/1.1 201 Created
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 49acfeec-4c85-470a-872b-2eaebd8d751e
+    Date: Tue, 27 Jun 2017 08:55:25 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+### Update Bucket
+
+Updates bucket meta-data, like ACLs.
+
+Schema:
+
+- `PUT /{volume}/{bucket}`
+
+Additional HTTP Headers:
+
+| HTTP Header | Value | Description |
+|:---- |:---- |:----
+| x-ozone-acl | ozone ACLs | Optional. Ozone acls. |
+| x-ozone-bucket-versioning | enabled/disabled | Optional. Whether or not to enable bucket versioning. |
+
+Sample HTTP PUT request:
+
+    curl -i -X PUT -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" -H "x-ozone-acl: ADD user:peregrin:rw" http://localhost:9880/volume-of-bilbo/bucket-to-update
+
+This request adds the ACL policy specified by the HTTP header *x-ozone-acl* to bucket */volume-of-bilbo/bucket-to-update*; the ACL field *ADD user:peregrin:rw* gives additional read/write permission on this bucket to user *peregrin*.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: b061a295-5faf-4b98-94b9-8b3e87c8eb5e
+    Date: Tue, 27 Jun 2017 09:02:37 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+### Delete Bucket
+
+Deletes a bucket if it is empty. A user can only delete buckets they own, while administrators can delete buckets owned by any user, as long as the bucket is empty.
+
+Schema:
+
+- `DELETE /{volume}/{bucket}`
+
+Sample HTTP DELETE request:
+
+    curl -i -X DELETE -H "Authorization:OZONE root" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "x-ozone-version: v1" -H "x-ozone-user:bilbo" "http://localhost:9880/volume-of-bilbo/bucket-0"
+
+This request deletes bucket */volume-of-bilbo/bucket-0*. The client receives a zero-length content response.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: f57acd7a-2116-4c2f-aa2f-5a483db81c9c
+    Date: Tue, 27 Jun 2017 09:16:52 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+
+### Info Bucket
+
+This API returns information about a given bucket.
+
+Schema:
+
+- `GET /{volume}/{bucket}?info=bucket`
+
+Query Parameters:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| info | "bucket" | Required and enforced with this value. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo/bucket-0?info=bucket"
+
+This request gets the info of bucket */volume-of-bilbo/bucket-0*. The client receives a JSON object containing the bucket info.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: f125485b-8cae-4c7f-a2d6-5b1fefd6f193
+    Date: Tue, 27 Jun 2017 09:08:31 GMT
+    Content-Type: application/json
+    Content-Length: 138
+    Connection: keep-alive
+
+    {
+      "volumeName" : "volume-of-bilbo",
+      "bucketName" : "bucket-0",
+      "createdOn" : "Tue, 27 Jun 2017 08:55:25 GMT",
+      "acls" : [ ],
+      "versioning" : "DISABLED",
+      "storageType" : "DISK"
+    }
+
+### List Buckets
+
+List buckets in a given volume.
+
+Schema:
+
+- `GET /{volume}?prefix=<PREFIX>&max-keys=<MAX_RESULT_SIZE>&prev-key=<PREVIOUS_BUCKET_KEY>`
+
+Query Parameters:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| prefix | string | Optional. Only buckets with this prefix are included in the result. |
+| max-keys | int | Optional. Maximum number of buckets included in the result. Default is 1024 if not specified. |
+| prev-key | string | Optional. Bucket name from which listing should start; this key is excluded from the result. It must be a valid bucket name. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo?max-keys=10"
+
+This request lists all the buckets under volume *volume-of-bilbo*; the result contains at most 10 entries. The client receives an array of JSON objects, each of which represents the info of a bucket.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: e048c3d5-169c-470f-9903-632d9f9e32d5
+    Date: Tue, 27 Jun 2017 09:12:18 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 207
+    Connection: keep-alive
+
+    {
+      "buckets" : [ {
+        "volumeName" : "volume-of-bilbo",
+        "bucketName" : "bucket-0",
+        "createdOn" : "Tue, 27 Jun 2017 08:55:25 GMT",
+        "acls" : [ ],
+        "versioning" : null,
+        "storageType" : "DISK",
+        "bytesUsed" : 0,
+        "keyCount" : 0
+        },
+        ...
+      ]
+    }
+
+Key APIs
+------------------
+
+### Put Key
+
+This API allows users to create or overwrite keys inside a bucket.
+
+Schema:
+
+- `PUT /{volume}/{bucket}/{key}`
+
+Additional HTTP headers:
+
+| HTTP Header | Value | Description |
+|:---- |:---- |:----
+| Content-MD5 | MD5 digest | Standard HTTP header, file hash. |
+
+Sample PUT HTTP request:
+
+    curl -X PUT -T /path/to/localfile -H "Authorization:OZONE" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "x-ozone-version: v1" -H "x-ozone-user:bilbo" "http://localhost:9880/volume-of-bilbo/bucket-0/file-0"
+
+This request uploads the local file */path/to/localfile* specified by the *-T* option to ozone as user *bilbo*, mapped to the ozone key */volume-of-bilbo/bucket-0/file-0*. The client receives a zero-length content response.
+
+### Get Key
+
+This API allows users to get or download a key from an ozone bucket.
+
+Schema:
+
+- `GET /{volume}/{bucket}/{key}`
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo/bucket-0/file-0"
+
+This request reads the content of key */volume-of-bilbo/bucket-0/file-0*. If the content of the file is plain text, it can be directly dumped onto stdout.
+
+    HTTP/1.1 200 OK
+    Content-Type: application/octet-stream
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 1bcd7de7-d8e3-46bb-afee-bdc933d383b8
+    Date: Tue, 27 Jun 2017 09:35:29 GMT
+    Content-Length: 6
+    Connection: keep-alive
+
+    Hello Ozone!
+
+If the file is not plain text, specify the *-O* option in the curl command; the file will then be downloaded into the current working directory with the same name as the key. A sample request looks like the following:
+
+    curl -O -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo/bucket-0/file-1"
+
+The response looks like the following:
+
+    % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
+                                 Dload  Upload   Total   Spent    Left  Speed
+    100 6148k  100 6148k    0     0  24.0M      0 --:--:-- --:--:-- --:--:-- 24.1M
+
+### Delete Key
+
+This API allows users to delete a key from a bucket.
+
+Schema:
+
+- `DELETE /{volume}/{bucket}/{key}`
+
+Sample HTTP DELETE request:
+
+    curl -i -X DELETE -H "Authorization:OZONE root" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "x-ozone-version: v1" -H "x-ozone-user:bilbo" "http://localhost:9880/volume-of-bilbo/bucket-0/file-0"
+
+This request deletes key */volume-of-bilbo/bucket-0/file-0*. The client receives a zero-length content result:
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: f8c4a373-dd5f-4e3a-b6c4-ddf7e191fe91
+    Date: Tue, 27 Jun 2017 14:19:48 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 0
+    Connection: keep-alive
+
+### Info Key
+
+This API returns information about a given key.
+
+Schema:
+
+- `GET /{volume}/{bucket}/{key}?info=key`
+
+Query Parameter:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| info | String, "key" | Required and enforced with this value. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo/bucket-0/file-0?info=key"
+
+This request returns information about the key */volume-of-bilbo/bucket-0/file-0*. The client receives a JSON object listing the attributes of the key.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: c674343c-a0f2-49e4-bbd6-daa73e7dc131
+    Date: Mon, 03 Jul 2017 14:28:45 GMT
+    Content-Type: application/octet-stream
+    Content-Length: 73
+    Connection: keep-alive
+
+    {
+      "version" : 0,
+      "md5hash" : null,
+      "createdOn" : "Mon, 26 Jun 2017 04:23:30 GMT",
+      "modifiedOn" : "Mon, 26 Jun 2017 04:23:30 GMT",
+      "size" : 0,
+      "keyName" : "file-0"
+    }
+
+### List Keys
+
+This API allows users to list the keys in a bucket.
+
+Schema:
+
+- `GET /{volume}/{bucket}?prefix=<PREFIX>&max-keys=<MAX_RESULT_SIZE>&prev-key=<PREVIOUS_KEY>`
+
+Query Parameters:
+
+| Query Parameter | Value | Description |
+|:---- |:---- |:----
+| prefix | string | Optional. Only keys with this prefix are included in the result. |
+| max-keys | int | Optional. Maximum number of keys included in the result. Default is 1024 if not specified. |
+| prev-key | string | Optional. Key name from which listing should start; this key is excluded from the result. It must be a valid key name. |
+
+Sample HTTP GET request:
+
+    curl -i -H "x-ozone-user: bilbo" -H "x-ozone-version: v1" -H "Date: Mon, 26 Jun 2017 04:23:30 GMT" -H "Authorization:OZONE" "http://localhost:9880/volume-of-bilbo/bucket-0/?max-keys=100&prefix=file"
+
+This request lists the keys under bucket */volume-of-bilbo/bucket-0*; the listing result is filtered by the prefix *file*. The client receives an array of JSON objects, each of which represents the info of a matched key.
+
+    HTTP/1.1 200 OK
+    x-ozone-server-name: localhost
+    x-ozone-request-id: 7f9fc970-9904-4c56-b671-83a086c6f555
+    Date: Tue, 27 Jun 2017 09:48:59 GMT
+    Content-Type: application/json
+    Content-Length: 209
+    Connection: keep-alive
+
+    {
+      "name" : null,
+      "prefix" : file,
+      "maxKeys" : 0,
+      "truncated" : false,
+      "keyList" : [ {
+          "version" : 0,
+          "md5hash" : null,
+          "createdOn" : "Mon, 26 Jun 2017 04:23:30 GMT",
+          "modifiedOn" : "Mon, 26 Jun 2017 04:23:30 GMT",
+          "size" : 0,
+          "keyName" : "file-0"
+          },
+          ...
+       ]
+    }
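+
+Because *max-keys* caps the size of each response, listing a large bucket
+means paging: take the last *keyName* of one response and pass it as the
+*prev-key* of the next request, until an empty page comes back. The following
+is a rough sketch of that loop; it extracts *keyName* values with a naive
+regular expression, so real code should use a proper JSON parser, and the
+host, port, volume, bucket and user are assumptions taken from the samples
+above.
+
+    import java.io.BufferedReader;
+    import java.io.InputStreamReader;
+    import java.net.HttpURLConnection;
+    import java.net.URL;
+    import java.net.URLEncoder;
+    import java.nio.charset.StandardCharsets;
+    import java.time.ZoneOffset;
+    import java.time.ZonedDateTime;
+    import java.time.format.DateTimeFormatter;
+    import java.util.ArrayList;
+    import java.util.List;
+    import java.util.regex.Matcher;
+    import java.util.regex.Pattern;
+
+    /** Pages through the List Keys API using max-keys and prev-key. */
+    public class ListAllKeys {
+      private static final Pattern KEY_NAME =
+          Pattern.compile("\"keyName\"\\s*:\\s*\"([^\"]+)\"");
+
+      public static void main(String[] args) throws Exception {
+        String bucketUrl = "http://localhost:9880/volume-of-bilbo/bucket-0/";
+        String prevKey = null;
+        List<String> keys = new ArrayList<>();
+        while (true) {
+          String query = "?max-keys=1000" + (prevKey == null ? ""
+              : "&prev-key=" + URLEncoder.encode(prevKey, "UTF-8"));
+          HttpURLConnection conn = (HttpURLConnection)
+              new URL(bucketUrl + query).openConnection();
+          conn.setRequestProperty("x-ozone-version", "v1");
+          conn.setRequestProperty("x-ozone-user", "bilbo");
+          conn.setRequestProperty("Authorization", "OZONE");
+          conn.setRequestProperty("Date", DateTimeFormatter.RFC_1123_DATE_TIME
+              .format(ZonedDateTime.now(ZoneOffset.UTC)));
+          StringBuilder body = new StringBuilder();
+          try (BufferedReader reader = new BufferedReader(new InputStreamReader(
+              conn.getInputStream(), StandardCharsets.UTF_8))) {
+            String line;
+            while ((line = reader.readLine()) != null) {
+              body.append(line);
+            }
+          }
+          // Pull the keyName values out of this page; stop on an empty page.
+          List<String> page = new ArrayList<>();
+          Matcher matcher = KEY_NAME.matcher(body);
+          while (matcher.find()) {
+            page.add(matcher.group(1));
+          }
+          if (page.isEmpty()) {
+            break;
+          }
+          keys.addAll(page);
+          prevKey = page.get(page.size() - 1);
+        }
+        System.out.println("Total keys listed: " + keys.size());
+      }
+    }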
diff --git a/hadoop-ozone/ozone-manager/src/main/webapps/ksm/index.html b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/index.html
new file mode 100644
index 0000000..bee42bf
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/index.html
@@ -0,0 +1,70 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html lang="en">
+<head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
+    <meta name="description" content="HDFS Key Space Manager">
+
+    <title>HDFS Key Space Manager</title>
+
+    <link href="static/bootstrap-3.0.2/css/bootstrap.min.css" rel="stylesheet">
+    <link href="static/hadoop.css" rel="stylesheet">
+    <link href="static/nvd3-1.8.5.min.css" rel="stylesheet">
+
+    <link href="static/ozone.css" rel="stylesheet">
+
+</head>
+
+<body ng-app="ksm">
+
+<header class="navbar navbar-inverse navbar-fixed-top bs-docs-nav">
+    <div class="container-fluid">
+        <div class="navbar-header">
+            <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar"
+                    aria-expanded="false" aria-controls="navbar">
+                <span class="sr-only">Toggle navigation</span>
+                <span class="icon-bar"></span>
+                <span class="icon-bar"></span>
+                <span class="icon-bar"></span>
+            </button>
+            <a class="navbar-brand" href="#">HDFS KSM</a>
+        </div>
+        <navmenu
+                metrics="{ 'Ksm metrics' : '#!/metrics/ksm', 'Rpc metrics' : '#!/metrics/rpc'}"></navmenu>
+    </div>
+</header>
+
+<div class="container-fluid">
+    <ng-view></ng-view>
+</div><!-- /.container -->
+
+<script src="static/jquery-1.10.2.min.js"></script>
+<script src="static/angular-1.6.4.min.js"></script>
+<script src="static/angular-route-1.6.4.min.js"></script>
+<script src="static/d3-3.5.17.min.js"></script>
+<script src="static/nvd3-1.8.5.min.js"></script>
+<script src="static/angular-nvd3-1.0.9.min.js"></script>
+<script src="static/ozone.js"></script>
+<script src="ksm.js"></script>
+<script src="static/bootstrap-3.0.2/js/bootstrap.min.js"></script>
+</body>
+</html>
diff --git a/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm-metrics.html b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm-metrics.html
new file mode 100644
index 0000000..e63fb00
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm-metrics.html
@@ -0,0 +1,44 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<h1>KSM Metrics</h1>
+
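+<!-- Request and failure counts per operation type, as grouped from the KSM JMX metrics by ksm.js. -->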
+<div ng-repeat="(type,numbers) in $ctrl.metrics.nums">
+    <h2>{{type}}</h2>
+    <div class="container">
+        <div class="col-md-6">
+            <h3>Requests ({{numbers.ops}} ops)</h3>
+            <nvd3 options="$ctrl.graphOptions"
+                  data="numbers.all"></nvd3>
+        </div>
+        <div class="col-md-6">
+            <h3>Failures</h3>
+            <nvd3 options="$ctrl.graphOptions"
+                  data="numbers.failures"></nvd3>
+        </div>
+    </div>
+</div>
+
+<div ng-show="$ctrl.metrics.others.length > 0">
+    <h2>Other JMX properties</h2>
+
+    <table class="table">
+        <tr ng-repeat="metric in $ctrl.metrics.others">
+            <td>{{metric.key}}</td>
+            <td>{{metric.value}}</td>
+        </tr>
+    </table>
+</div>
diff --git a/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm.js b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm.js
new file mode 100644
index 0000000..7fb52b1
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/ksm.js
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+    "use strict";
+
+    var isIgnoredJmxKeys = function (key) {
+        return key == 'name' || key == 'modelerType' || key.match(/tag.*/);
+    };
+
+    angular.module('ksm', ['ozone', 'nvd3']);
+    angular.module('ksm').config(function ($routeProvider) {
+        $routeProvider
+            .when("/metrics/ksm", {
+                template: "<ksm-metrics></ksm-metrics>"
+            });
+    });
+    angular.module('ksm').component('ksmMetrics', {
+        templateUrl: 'ksm-metrics.html',
+        controller: function ($http) {
+            var ctrl = this;
+
+            ctrl.graphOptions = {
+                chart: {
+                    type: 'pieChart',
+                    height: 500,
+                    x: function (d) {
+                        return d.key;
+                    },
+                    y: function (d) {
+                        return d.value;
+                    },
+                    showLabels: true,
+                    labelType: 'value',
+                    duration: 500,
+                    labelThreshold: 0.01,
+                    labelSunbeamLayout: true,
+                    legend: {
+                        margin: {
+                            top: 5,
+                            right: 35,
+                            bottom: 5,
+                            left: 0
+                        }
+                    }
+                }
+            };
+
+
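+            // Fetch the KSM JMX metrics and group the numeric counters by
+            // operation type: keys matching Num<Type><Name> feed the request
+            // chart, Num<Type><Name>Fails feed the failure chart, Ops counts
+            // are kept separately, and any other non-ignored key is listed
+            // under "Other JMX properties".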
+            $http.get("jmx?qry=Hadoop:service=KeySpaceManager,name=KSMMetrics")
+                .then(function (result) {
+
+                    var groupedMetrics = {others: [], nums: {}};
+                    var metrics = result.data.beans[0];
+                    for (var key in metrics) {
+                        var numericalStatistic = key.match(/Num([A-Z][a-z]+)(.+?)(Fails)?$/);
+                        if (numericalStatistic) {
+                            var type = numericalStatistic[1];
+                            var name = numericalStatistic[2];
+                            var failed = numericalStatistic[3];
+                            groupedMetrics.nums[type] = groupedMetrics.nums[type] || {
+                                    failures: [],
+                                    all: []
+                                };
+                            if (failed) {
+                                groupedMetrics.nums[type].failures.push({
+                                    key: name,
+                                    value: metrics[key]
+                                })
+                            } else {
+                                if (name == "Ops") {
+                                    groupedMetrics.nums[type].ops = metrics[key]
+                                } else {
+                                    groupedMetrics.nums[type].all.push({
+                                        key: name,
+                                        value: metrics[key]
+                                    })
+                                }
+                            }
+                        } else if (isIgnoredJmxKeys(key)) {
+                            //ignore
+                        } else {
+                            groupedMetrics.others.push({
+                                'key': key,
+                                'value': metrics[key]
+                            });
+                        }
+                    }
+                    ctrl.metrics = groupedMetrics;
+                })
+        }
+    });
+
+})();
diff --git a/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.css b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.css
new file mode 100644
index 0000000..e442adc
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.css
@@ -0,0 +1,23 @@
+/**
+ *   Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements.  See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+*/
+body {
+  padding-top: 50px;
+}
+.starter-template {
+  padding: 40px 15px;
+  text-align: center;
+}
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.html b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.html
new file mode 100644
index 0000000..0821899
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/main/webapps/ksm/main.html
@@ -0,0 +1,18 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+  -->
+<overview>
+</overview>
\ No newline at end of file
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestBucketManagerImpl.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestBucketManagerImpl.java
new file mode 100644
index 0000000..0b43bf9
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestBucketManagerImpl.java
@@ -0,0 +1,395 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketArgs;
+import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.ksm.exceptions.KSMException;
+import org.apache.hadoop.ozone.ksm.exceptions
+    .KSMException.ResultCodes;
+import org.apache.hadoop.ozone.OzoneAcl;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.stubbing.Answer;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import static org.mockito.Mockito.any;
+
+/**
+ * Tests BucketManagerImpl, mocks KSMMetadataManager for testing.
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestBucketManagerImpl {
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private KSMMetadataManager getMetadataManagerMock(String... volumesToCreate)
+      throws IOException {
+    KSMMetadataManager metadataManager = Mockito.mock(KSMMetadataManager.class);
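+    // Back the mocked KSMMetadataManager with an in-memory map and a real
+    // ReadWriteLock so bucket operations can be verified without a metadata store.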
+    Map<String, byte[]> metadataDB = new HashMap<>();
+    ReadWriteLock lock = new ReentrantReadWriteLock();
+
+    Mockito.when(metadataManager.writeLock()).thenReturn(lock.writeLock());
+    Mockito.when(metadataManager.readLock()).thenReturn(lock.readLock());
+    Mockito.when(metadataManager.getVolumeKey(any(String.class))).thenAnswer(
+        (InvocationOnMock invocation) ->
+            DFSUtil.string2Bytes(
+                OzoneConsts.KSM_VOLUME_PREFIX + invocation.getArguments()[0]));
+    Mockito.when(metadataManager
+        .getBucketKey(any(String.class), any(String.class))).thenAnswer(
+            (InvocationOnMock invocation) ->
+                DFSUtil.string2Bytes(
+                    OzoneConsts.KSM_VOLUME_PREFIX
+                        + invocation.getArguments()[0]
+                        + OzoneConsts.KSM_BUCKET_PREFIX
+                        + invocation.getArguments()[1]));
+
+    Mockito.doAnswer(
+        new Answer<Boolean>() {
+          @Override
+          public Boolean answer(InvocationOnMock invocation)
+              throws Throwable {
+            String keyRootName =  OzoneConsts.KSM_KEY_PREFIX
+                + invocation.getArguments()[0]
+                + OzoneConsts.KSM_KEY_PREFIX
+                + invocation.getArguments()[1]
+                + OzoneConsts.KSM_KEY_PREFIX;
+            Iterator<String> keyIterator = metadataDB.keySet().iterator();
+            while(keyIterator.hasNext()) {
+              if(keyIterator.next().startsWith(keyRootName)) {
+                return false;
+              }
+            }
+            return true;
+          }
+        }).when(metadataManager).isBucketEmpty(any(String.class),
+        any(String.class));
+
+    Mockito.doAnswer(
+        new Answer<Void>() {
+          @Override
+          public Void answer(InvocationOnMock invocation) throws Throwable {
+            metadataDB.put(DFSUtil.bytes2String(
+                (byte[])invocation.getArguments()[0]),
+                (byte[])invocation.getArguments()[1]);
+            return null;
+          }
+        }).when(metadataManager).put(any(byte[].class), any(byte[].class));
+
+    Mockito.when(metadataManager.get(any(byte[].class))).thenAnswer(
+        (InvocationOnMock invocation) ->
+            metadataDB.get(DFSUtil.bytes2String(
+                (byte[])invocation.getArguments()[0]))
+    );
+    Mockito.doAnswer(
+        new Answer<Void>() {
+          @Override
+          public Void answer(InvocationOnMock invocation) throws Throwable {
+            metadataDB.remove(DFSUtil.bytes2String(
+                (byte[])invocation.getArguments()[0]));
+            return null;
+          }
+        }).when(metadataManager).delete(any(byte[].class));
+
+    for(String volumeName : volumesToCreate) {
+      byte[] dummyVolumeInfo = DFSUtil.string2Bytes(volumeName);
+      metadataDB.put(OzoneConsts.KSM_VOLUME_PREFIX + volumeName,
+                     dummyVolumeInfo);
+    }
+    return metadataManager;
+  }
+
+  @Test
+  public void testCreateBucketWithoutVolume() throws IOException {
+    thrown.expectMessage("Volume doesn't exist");
+    KSMMetadataManager metaMgr = getMetadataManagerMock();
+    try {
+      BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+      KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+          .setVolumeName("sampleVol")
+          .setBucketName("bucketOne")
+          .build();
+      bucketManager.createBucket(bucketInfo);
+    } catch(KSMException ksmEx) {
+      Assert.assertEquals(ResultCodes.FAILED_VOLUME_NOT_FOUND,
+          ksmEx.getResult());
+      throw ksmEx;
+    }
+  }
+
+  @Test
+  public void testCreateBucket() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    Assert.assertNotNull(bucketManager.getBucketInfo("sampleVol", "bucketOne"));
+  }
+
+  @Test
+  public void testCreateAlreadyExistingBucket() throws IOException {
+    thrown.expectMessage("Bucket already exist");
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    try {
+      BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+      KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+          .setVolumeName("sampleVol")
+          .setBucketName("bucketOne")
+          .build();
+      bucketManager.createBucket(bucketInfo);
+      bucketManager.createBucket(bucketInfo);
+    } catch(KSMException ksmEx) {
+      Assert.assertEquals(ResultCodes.FAILED_BUCKET_ALREADY_EXISTS,
+          ksmEx.getResult());
+      throw ksmEx;
+    }
+  }
+
+  @Test
+  public void testGetBucketInfoForInvalidBucket() throws IOException {
+    thrown.expectMessage("Bucket not found");
+    try {
+      KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+      BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+      bucketManager.getBucketInfo("sampleVol", "bucketOne");
+    } catch(KSMException ksmEx) {
+      Assert.assertEquals(ResultCodes.FAILED_BUCKET_NOT_FOUND,
+          ksmEx.getResult());
+      throw ksmEx;
+    }
+  }
+
+  @Test
+  public void testGetBucketInfo() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setStorageType(StorageType.DISK)
+        .setIsVersionEnabled(false)
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    KsmBucketInfo result = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals("sampleVol", result.getVolumeName());
+    Assert.assertEquals("bucketOne", result.getBucketName());
+    Assert.assertEquals(StorageType.DISK,
+        result.getStorageType());
+    Assert.assertEquals(false, result.getIsVersionEnabled());
+  }
+
+  @Test
+  public void testSetBucketPropertyAddACL() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    List<OzoneAcl> acls = new LinkedList<>();
+    OzoneAcl ozoneAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+        "root", OzoneAcl.OzoneACLRights.READ);
+    acls.add(ozoneAcl);
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setAcls(acls)
+        .setStorageType(StorageType.DISK)
+        .setIsVersionEnabled(false)
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    KsmBucketInfo result = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals("sampleVol", result.getVolumeName());
+    Assert.assertEquals("bucketOne", result.getBucketName());
+    Assert.assertEquals(1, result.getAcls().size());
+    List<OzoneAcl> addAcls = new LinkedList<>();
+    OzoneAcl newAcl = new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+        "ozone", OzoneAcl.OzoneACLRights.READ);
+    addAcls.add(newAcl);
+    KsmBucketArgs bucketArgs = KsmBucketArgs.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setAddAcls(addAcls)
+        .build();
+    bucketManager.setBucketProperty(bucketArgs);
+    KsmBucketInfo updatedResult = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals(2, updatedResult.getAcls().size());
+    Assert.assertTrue(updatedResult.getAcls().contains(newAcl));
+  }
+
+  @Test
+  public void testSetBucketPropertyRemoveACL() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    List<OzoneAcl> acls = new LinkedList<>();
+    OzoneAcl aclOne = new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+        "root", OzoneAcl.OzoneACLRights.READ);
+    OzoneAcl aclTwo = new OzoneAcl(OzoneAcl.OzoneACLType.USER,
+        "ozone", OzoneAcl.OzoneACLRights.READ);
+    acls.add(aclOne);
+    acls.add(aclTwo);
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setAcls(acls)
+        .setStorageType(StorageType.DISK)
+        .setIsVersionEnabled(false)
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    KsmBucketInfo result = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals(2, result.getAcls().size());
+    List<OzoneAcl> removeAcls = new LinkedList<>();
+    removeAcls.add(aclTwo);
+    KsmBucketArgs bucketArgs = KsmBucketArgs.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setRemoveAcls(removeAcls)
+        .build();
+    bucketManager.setBucketProperty(bucketArgs);
+    KsmBucketInfo updatedResult = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals(1, updatedResult.getAcls().size());
+    Assert.assertFalse(updatedResult.getAcls().contains(aclTwo));
+  }
+
+  @Test
+  public void testSetBucketPropertyChangeStorageType() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setStorageType(StorageType.DISK)
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    KsmBucketInfo result = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals(StorageType.DISK,
+        result.getStorageType());
+    KsmBucketArgs bucketArgs = KsmBucketArgs.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setStorageType(StorageType.SSD)
+        .build();
+    bucketManager.setBucketProperty(bucketArgs);
+    KsmBucketInfo updatedResult = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertEquals(StorageType.SSD,
+        updatedResult.getStorageType());
+  }
+
+  @Test
+  public void testSetBucketPropertyChangeVersioning() throws IOException {
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setIsVersionEnabled(false)
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    KsmBucketInfo result = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertFalse(result.getIsVersionEnabled());
+    KsmBucketArgs bucketArgs = KsmBucketArgs.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .setIsVersionEnabled(true)
+        .build();
+    bucketManager.setBucketProperty(bucketArgs);
+    KsmBucketInfo updatedResult = bucketManager.getBucketInfo(
+        "sampleVol", "bucketOne");
+    Assert.assertTrue(updatedResult.getIsVersionEnabled());
+  }
+
+  @Test
+  public void testDeleteBucket() throws IOException {
+    thrown.expectMessage("Bucket not found");
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    for(int i = 0; i < 5; i++) {
+      KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+          .setVolumeName("sampleVol")
+          .setBucketName("bucket_" + i)
+          .build();
+      bucketManager.createBucket(bucketInfo);
+    }
+    for(int i = 0; i < 5; i++) {
+      Assert.assertEquals("bucket_" + i,
+          bucketManager.getBucketInfo(
+              "sampleVol", "bucket_" + i).getBucketName());
+    }
+    try {
+      bucketManager.deleteBucket("sampleVol", "bucket_1");
+      Assert.assertNotNull(bucketManager.getBucketInfo(
+          "sampleVol", "bucket_2"));
+    } catch(IOException ex) {
+      Assert.fail(ex.getMessage());
+    }
+    try {
+      bucketManager.getBucketInfo("sampleVol", "bucket_1");
+    } catch(KSMException ksmEx) {
+      Assert.assertEquals(ResultCodes.FAILED_BUCKET_NOT_FOUND,
+          ksmEx.getResult());
+      throw ksmEx;
+    }
+  }
+
+  @Test
+  public void testDeleteNonEmptyBucket() throws IOException {
+    thrown.expectMessage("Bucket is not empty");
+    KSMMetadataManager metaMgr = getMetadataManagerMock("sampleVol");
+    BucketManager bucketManager = new BucketManagerImpl(metaMgr);
+    KsmBucketInfo bucketInfo = KsmBucketInfo.newBuilder()
+        .setVolumeName("sampleVol")
+        .setBucketName("bucketOne")
+        .build();
+    bucketManager.createBucket(bucketInfo);
+    //Create keys in bucket
+    metaMgr.put(DFSUtil.string2Bytes("/sampleVol/bucketOne/key_one"),
+        DFSUtil.string2Bytes("value_one"));
+    metaMgr.put(DFSUtil.string2Bytes("/sampleVol/bucketOne/key_two"),
+        DFSUtil.string2Bytes("value_two"));
+    try {
+      bucketManager.deleteBucket("sampleVol", "bucketOne");
+    } catch(KSMException ksmEx) {
+      Assert.assertEquals(ResultCodes.FAILED_BUCKET_NOT_EMPTY,
+          ksmEx.getResult());
+      throw ksmEx;
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java
new file mode 100644
index 0000000..de4a85a
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestChunkStreams.java
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream;
+import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
+import org.apache.hadoop.hdds.scm.storage.ChunkInputStream;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * This class tests ChunkGroupInputStream and ChunkGroupOutputStream.
+ */
+public class TestChunkStreams {
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  /**
+   * This test uses ByteArrayOutputStream as the underlying stream to test
+   * the correctness of ChunkGroupOutputStream.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testWriteGroupOutputStream() throws Exception {
+    try (ChunkGroupOutputStream groupOutputStream =
+             new ChunkGroupOutputStream()) {
+      ArrayList<OutputStream> outputStreams = new ArrayList<>();
+
+      // 5 byte-array streams of 100 bytes each; writing 500 bytes writes
+      // 100 bytes to each of them.
+      for (int i = 0; i < 5; i++) {
+        ByteArrayOutputStream out = new ByteArrayOutputStream(100);
+        outputStreams.add(out);
+        groupOutputStream.addStream(out, 100);
+      }
+      assertEquals(0, groupOutputStream.getByteOffset());
+
+      String dataString = RandomStringUtils.randomAscii(500);
+      byte[] data = dataString.getBytes();
+      groupOutputStream.write(data, 0, data.length);
+      assertEquals(500, groupOutputStream.getByteOffset());
+
+      String res = "";
+      int offset = 0;
+      for (OutputStream stream : outputStreams) {
+        String subString = stream.toString();
+        res += subString;
+        assertEquals(dataString.substring(offset, offset + 100), subString);
+        offset += 100;
+      }
+      assertEquals(dataString, res);
+    }
+  }
+
+  @Test
+  public void testErrorWriteGroupOutputStream() throws Exception {
+    try (ChunkGroupOutputStream groupOutputStream =
+             new ChunkGroupOutputStream()) {
+      ArrayList<OutputStream> outputStreams = new ArrayList<>();
+
+      // 5 byte-array streams of 100 bytes each; all 5 streams make up a
+      // ChunkGroupOutputStream with a total capacity of 500 bytes.
+      for (int i = 0; i < 5; i++) {
+        ByteArrayOutputStream out = new ByteArrayOutputStream(100);
+        outputStreams.add(out);
+        groupOutputStream.addStream(out, 100);
+      }
+      assertEquals(0, groupOutputStream.getByteOffset());
+
+      // the first write of 100 bytes should succeed
+      groupOutputStream.write(RandomStringUtils.randomAscii(100).getBytes());
+      assertEquals(100, groupOutputStream.getByteOffset());
+
+      // the second write of 500 bytes should fail, as only 400 bytes of
+      // space are left
+      // TODO : if we decide to accept the remaining 400 bytes instead, or to
+      // return a more informative error code rather than an exception, this
+      // part needs to change.
+      exception.expect(Exception.class);
+      groupOutputStream.write(RandomStringUtils.randomAscii(500).getBytes());
+      assertEquals(100, groupOutputStream.getByteOffset());
+    }
+  }
+
+  @Test
+  public void testReadGroupInputStream() throws Exception {
+    try (ChunkGroupInputStream groupInputStream = new ChunkGroupInputStream()) {
+      ArrayList<ChunkInputStream> inputStreams = new ArrayList<>();
+
+      String dataString = RandomStringUtils.randomAscii(500);
+      byte[] buf = dataString.getBytes();
+      int offset = 0;
+      for (int i = 0; i < 5; i++) {
+        int tempOffset = offset;
+        ChunkInputStream in =
+            new ChunkInputStream(null, null, null, new ArrayList<>(), null) {
+              private ByteArrayInputStream in =
+                  new ByteArrayInputStream(buf, tempOffset, 100);
+
+              @Override
+              public void seek(long pos) throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public long getPos() throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public boolean seekToNewSource(long targetPos)
+                  throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public int read() throws IOException {
+                return in.read();
+              }
+
+              @Override
+              public int read(byte[] b, int off, int len) throws IOException {
+                return in.read(b, off, len);
+              }
+            };
+        inputStreams.add(in);
+        offset += 100;
+        groupInputStream.addStream(in, 100);
+      }
+
+      byte[] resBuf = new byte[500];
+      int len = groupInputStream.read(resBuf, 0, 500);
+
+      assertEquals(500, len);
+      assertEquals(dataString, new String(resBuf));
+    }
+  }
+
+  @Test
+  public void testErrorReadGroupInputStream() throws Exception {
+    try (ChunkGroupInputStream groupInputStream = new ChunkGroupInputStream()) {
+      ArrayList<ChunkInputStream> inputStreams = new ArrayList<>();
+
+      String dataString = RandomStringUtils.randomAscii(500);
+      byte[] buf = dataString.getBytes();
+      int offset = 0;
+      for (int i = 0; i < 5; i++) {
+        int tempOffset = offset;
+        ChunkInputStream in =
+            new ChunkInputStream(null, null, null, new ArrayList<>(), null) {
+              private ByteArrayInputStream in =
+                  new ByteArrayInputStream(buf, tempOffset, 100);
+
+              @Override
+              public void seek(long pos) throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public long getPos() throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public boolean seekToNewSource(long targetPos)
+                  throws IOException {
+                throw new UnsupportedOperationException();
+              }
+
+              @Override
+              public int read() throws IOException {
+                return in.read();
+              }
+
+              @Override
+              public int read(byte[] b, int off, int len) throws IOException {
+                return in.read(b, off, len);
+              }
+            };
+        inputStreams.add(in);
+        offset += 100;
+        groupInputStream.addStream(in, 100);
+      }
+
+      byte[] resBuf = new byte[600];
+      // read 340 bytes first
+      int len = groupInputStream.read(resBuf, 0, 340);
+      assertEquals(3, groupInputStream.getCurrentStreamIndex());
+      assertEquals(60, groupInputStream.getRemainingOfIndex(3));
+      assertEquals(340, len);
+      assertEquals(dataString.substring(0, 340),
+          new String(resBuf).substring(0, 340));
+
+      // read the next 260 bytes, but only 160 are left
+      len = groupInputStream.read(resBuf, 340, 260);
+      assertEquals(5, groupInputStream.getCurrentStreamIndex());
+      assertEquals(0, groupInputStream.getRemainingOfIndex(4));
+      assertEquals(160, len);
+      assertEquals(dataString, new String(resBuf).substring(0, 500));
+
+      // we have reached EOF; a further read should return -1
+      len = groupInputStream.read(resBuf, 0, 1);
+      assertEquals(-1, len);
+    }
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java
new file mode 100644
index 0000000..b263df5
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerHttpServer.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.ksm;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.web.URLConnectionFactory;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpConfig.Policy;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * Tests the KSM HTTP server with various HTTP policy options.
+ */
+@RunWith(value = Parameterized.class)
+public class TestKeySpaceManagerHttpServer {
+  private static final String BASEDIR = GenericTestUtils
+      .getTempPath(TestKeySpaceManagerHttpServer.class.getSimpleName());
+  private static String keystoresDir;
+  private static String sslConfDir;
+  private static Configuration conf;
+  private static URLConnectionFactory connectionFactory;
+
+  @Parameters public static Collection<Object[]> policy() {
+    Object[][] params = new Object[][] {
+        {HttpConfig.Policy.HTTP_ONLY},
+        {HttpConfig.Policy.HTTPS_ONLY},
+        {HttpConfig.Policy.HTTP_AND_HTTPS} };
+    return Arrays.asList(params);
+  }
+
+  private final HttpConfig.Policy policy;
+
+  public TestKeySpaceManagerHttpServer(Policy policy) {
+    super();
+    this.policy = policy;
+  }
+
+  @BeforeClass public static void setUp() throws Exception {
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    conf = new Configuration();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(
+        TestKeySpaceManagerHttpServer.class);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+    connectionFactory =
+        URLConnectionFactory.newDefaultURLConnectionFactory(conf);
+    conf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getClientSSLConfigFileName());
+    conf.set(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getServerSSLConfigFileName());
+  }
+
+  @AfterClass public static void tearDown() throws Exception {
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+  }
+
+  @Test public void testHttpPolicy() throws Exception {
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
+    conf.set(ScmConfigKeys.OZONE_SCM_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
+    KeySpaceManagerHttpServer server = null;
+    try {
+      server = new KeySpaceManagerHttpServer(conf, null);
+      server.start();
+
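+      // The server should expose exactly the endpoints that the configured policy enables.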
+      Assert.assertTrue(implies(policy.isHttpEnabled(),
+          canAccess("http", server.getHttpAddress())));
+      Assert.assertTrue(
+          implies(!policy.isHttpEnabled(), server.getHttpAddress() == null));
+
+      Assert.assertTrue(implies(policy.isHttpsEnabled(),
+          canAccess("https", server.getHttpsAddress())));
+      Assert.assertTrue(
+          implies(!policy.isHttpsEnabled(), server.getHttpsAddress() == null));
+
+    } finally {
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+
+  private static boolean canAccess(String scheme, InetSocketAddress addr) {
+    if (addr == null) {
+      return false;
+    }
+    try {
+      URL url =
+          new URL(scheme + "://" + NetUtils.getHostPortString(addr) + "/jmx");
+      URLConnection conn = connectionFactory.openConnection(url);
+      conn.connect();
+      conn.getContent();
+    } catch (Exception e) {
+      return false;
+    }
+    return true;
+  }
+
+  private static boolean implies(boolean a, boolean b) {
+    return !a || b;
+  }
+}
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/package-info.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/package-info.java
new file mode 100644
index 0000000..089ff4b
--- /dev/null
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/ksm/package-info.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.ksm;
+/**
+ * KSM tests
+ */
diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml
new file mode 100644
index 0000000..5667444
--- /dev/null
+++ b/hadoop-ozone/pom.xml
@@ -0,0 +1,169 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-dist</artifactId>
+    <version>3.2.0-SNAPSHOT</version>
+    <relativePath>../hadoop-project-dist</relativePath>
+  </parent>
+  <artifactId>hadoop-ozone</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone parent project</description>
+  <name>Apache Hadoop Ozone</name>
+  <packaging>pom</packaging>
+
+  <modules>
+    <module>common</module>
+    <module>client</module>
+    <module>ozone-manager</module>
+    <module>tools</module>
+    <module>integration-test</module>
+    <module>objectstore-service</module>
+  </modules>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-scm</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-tools</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-scm</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>**/target/**</exclude>
+            <exclude>acceptance-test/hadoop-ozone-acceptance-test.iml</exclude>
+            <exclude>.gitattributes</exclude>
+            <exclude>.idea/**</exclude>
+            <exclude>dev-support/checkstyle*</exclude>
+            <exclude>dev-support/jdiff/**</exclude>
+            <exclude>dev-support/*tests</exclude>
+            <exclude>src/test/empty-file</exclude>
+            <exclude>src/test/all-tests</exclude>
+            <exclude>src/test/resources/*.tgz</exclude>
+            <exclude>src/test/resources/data*</exclude>
+            <exclude>**/*.json</exclude>
+            <exclude>src/test/resources/empty-file</exclude>
+            <exclude>src/main/webapps/datanode/robots.txt</exclude>
+            <exclude>src/main/webapps/hdfs/robots.txt</exclude>
+            <exclude>src/main/webapps/journal/robots.txt</exclude>
+            <exclude>src/main/webapps/secondary/robots.txt</exclude>
+            <exclude>src/main/webapps/router/robots.txt</exclude>
+            <exclude>src/contrib/**</exclude>
+            <exclude>src/site/resources/images/*</exclude>
+            <exclude>webapps/static/bootstrap-3.0.2/**</exclude>
+            <exclude>webapps/static/jquery-1.10.2.min.js</exclude>
+            <exclude>webapps/static/jquery.dataTables.min.js</exclude>
+            <exclude>webapps/static/nvd3-1.8.5.min.css.map</exclude>
+            <exclude>webapps/static/nvd3-1.8.5.min.js</exclude>
+            <exclude>webapps/static/angular-route-1.6.4.min.js
+            </exclude>
+            <exclude>webapps/static/nvd3-1.8.5.min.css</exclude>
+            <exclude>webapps/static/angular-nvd3-1.0.9.min.js</exclude>
+            <exclude>webapps/static/nvd3-1.8.5.min.js.map</exclude>
+            <exclude>webapps/static/angular-1.6.4.min.js</exclude>
+            <exclude>webapps/static/d3-3.5.17.min.js</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <excludeFilterFile combine.self="override"></excludeFilterFile>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-ozone/tools/pom.xml b/hadoop-ozone/tools/pom.xml
new file mode 100644
index 0000000..918a675
--- /dev/null
+++ b/hadoop-ozone/tools/pom.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-ozone</artifactId>
+    <version>0.2.1-SNAPSHOT</version>
+  </parent>
+  <artifactId>hadoop-ozone-tools</artifactId>
+  <version>0.2.1-SNAPSHOT</version>
+  <description>Apache Hadoop Ozone Tools</description>
+  <name>Apache Hadoop Ozone Tools</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>hdds</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>io.dropwizard.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+      <version>3.2.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-scm</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-core</artifactId>
+      <version>1.19</version>
+    </dependency>
+    <dependency>
+      <groupId>org.openjdk.jmh</groupId>
+      <artifactId>jmh-generator-annprocess</artifactId>
+      <version>1.19</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java
new file mode 100644
index 0000000..d933e6f
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/Freon.java
@@ -0,0 +1,1149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.freon;
+
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.PropertyAccessor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.time.DurationFormatUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdds.client.OzoneQuota;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.ozone.client.*;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+import org.apache.hadoop.util.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Supplier;
+
+import static java.lang.Math.min;
+
+/**
+ * Freon - A tool to populate Ozone with data for testing.<br>
+ * This is not a map-reduce program and it is not for benchmarking
+ * Ozone write throughput.<br>
+ * It supports both online and offline modes; the default mode is offline and
+ * <i>-mode</i> can be used to change it.
+ * <p>
+ * In online mode, an active internet connection is required and
+ * Common Crawl data from AWS will be used.<br>
+ * The default source is:<br>
+ * https://commoncrawl.s3.amazonaws.com/crawl-data/
+ * CC-MAIN-2017-17/warc.paths.gz<br>
+ * (it contains the path to the actual data segment)<br>
+ * Users can override this using <i>-source</i>.
+ * The following values are derived from the URL of the Common Crawl data:
+ * <ul>
+ * <li>Domain will be used as Volume</li>
+ * <li>URL will be used as Bucket</li>
+ * <li>FileName will be used as Key</li>
+ * </ul></p>
+ * In offline mode, the data will be random bytes and
+ * the size of the data will be 10 KB.<br>
+ * <ul>
+ * <li>Default number of Volumes is 10; <i>-numOfVolumes</i>
+ * can be used to override it</li>
+ * <li>Default number of Buckets per Volume is 1000; <i>-numOfBuckets</i>
+ * can be used to override it</li>
+ * <li>Default number of Keys per Bucket is 500000; <i>-numOfKeys</i>
+ * can be used to override it</li>
+ * </ul>
+ */
+public final class Freon extends Configured implements Tool {
+
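+  // Operations tracked by Freon; one histogram per op is created in the constructor below.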
+  enum FreonOps {
+    VOLUME_CREATE,
+    BUCKET_CREATE,
+    KEY_CREATE,
+    KEY_WRITE
+  }
+
+  private static final String HELP = "help";
+  private static final String MODE = "mode";
+  private static final String SOURCE = "source";
+  private static final String VALIDATE_WRITE = "validateWrites";
+  private static final String JSON_WRITE_DIRECTORY = "jsonDir";
+  private static final String NUM_OF_THREADS = "numOfThreads";
+  private static final String NUM_OF_VOLUMES = "numOfVolumes";
+  private static final String NUM_OF_BUCKETS = "numOfBuckets";
+  private static final String NUM_OF_KEYS = "numOfKeys";
+  private static final String KEY_SIZE = "keySize";
+  private static final String RATIS = "ratis";
+
+  private static final String MODE_DEFAULT = "offline";
+  private static final String SOURCE_DEFAULT =
+      "https://commoncrawl.s3.amazonaws.com/" +
+          "crawl-data/CC-MAIN-2017-17/warc.paths.gz";
+  private static final String NUM_OF_THREADS_DEFAULT = "10";
+  private static final String NUM_OF_VOLUMES_DEFAULT = "10";
+  private static final String NUM_OF_BUCKETS_DEFAULT = "1000";
+  private static final String NUM_OF_KEYS_DEFAULT = "500000";
+  private static final String DURATION_FORMAT = "HH:mm:ss,SSS";
+
+  private static final int KEY_SIZE_DEFAULT = 10240;
+  private static final int QUANTILES = 10;
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Freon.class);
+
+  private boolean printUsage = false;
+  private boolean completed = false;
+  private boolean exception = false;
+
+  private String mode;
+  private String source;
+  private String numOfThreads;
+  private String numOfVolumes;
+  private String numOfBuckets;
+  private String numOfKeys;
+  private String jsonDir;
+  private boolean useRatis;
+  private ReplicationType type;
+  private ReplicationFactor factor;
+
+  private int threadPoolSize;
+  private int keySize;
+  private byte[] keyValue = null;
+
+  private boolean validateWrites;
+
+  private OzoneClient ozoneClient;
+  private ObjectStore objectStore;
+  private ExecutorService processor;
+
+  private long startTime;
+  private long jobStartTime;
+
+  private AtomicLong volumeCreationTime;
+  private AtomicLong bucketCreationTime;
+  private AtomicLong keyCreationTime;
+  private AtomicLong keyWriteTime;
+
+  private AtomicLong totalBytesWritten;
+
+  private AtomicInteger numberOfVolumesCreated;
+  private AtomicInteger numberOfBucketsCreated;
+  private AtomicLong numberOfKeysAdded;
+
+  private Long totalWritesValidated;
+  private Long writeValidationSuccessCount;
+  private Long writeValidationFailureCount;
+
+  private BlockingQueue<KeyValue> validationQueue;
+  private ArrayList<Histogram> histograms = new ArrayList<>();
+
+  @VisibleForTesting
+  Freon(Configuration conf) throws IOException {
+    startTime = System.nanoTime();
+    jobStartTime = System.currentTimeMillis();
+    volumeCreationTime = new AtomicLong();
+    bucketCreationTime = new AtomicLong();
+    keyCreationTime = new AtomicLong();
+    keyWriteTime = new AtomicLong();
+    totalBytesWritten = new AtomicLong();
+    numberOfVolumesCreated = new AtomicInteger();
+    numberOfBucketsCreated = new AtomicInteger();
+    numberOfKeysAdded = new AtomicLong();
+    ozoneClient = OzoneClientFactory.getClient(conf);
+    objectStore = ozoneClient.getObjectStore();
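+    // One latency histogram per Freon operation type, backed by a uniform
+    // reservoir so snapshot statistics are computed over a uniform sample
+    // of the whole run.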
+    for (FreonOps ops : FreonOps.values()) {
+      histograms.add(ops.ordinal(), new Histogram(new UniformReservoir()));
+    }
+  }
+
+  /**
+   * Main entry point for Freon.
+   *
+   * @param args command line arguments
+   */
+  public static void main(String[] args) throws Exception {
+    Configuration conf = new OzoneConfiguration();
+    int res = ToolRunner.run(conf, new Freon(conf), args);
+    System.exit(res);
+  }
+
+  @Override
+  public int run(String[] args) throws Exception {
+    GenericOptionsParser parser = new GenericOptionsParser(getConf(),
+        getOptions(), args);
+    parseOptions(parser.getCommandLine());
+    if (printUsage) {
+      usage();
+      return 0;
+    }
+
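+    // keyValue is (keySize - 36) bytes long; a 36-character random UUID
+    // string is appended to every key, so the total bytes written per key
+    // add up to keySize.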
+    keyValue =
+        DFSUtil.string2Bytes(RandomStringUtils.randomAscii(keySize - 36));
+
+    LOG.info("Number of Threads: " + numOfThreads);
+    threadPoolSize =
+        min(Integer.parseInt(numOfVolumes), Integer.parseInt(numOfThreads));
+    processor = Executors.newFixedThreadPool(threadPoolSize);
+    addShutdownHook();
+    if (mode.equals("online")) {
+      LOG.info("Mode: online");
+      throw new UnsupportedOperationException("Not yet implemented.");
+    } else {
+      LOG.info("Mode: offline");
+      LOG.info("Number of Volumes: {}.", numOfVolumes);
+      LOG.info("Number of Buckets per Volume: {}.", numOfBuckets);
+      LOG.info("Number of Keys per Bucket: {}.", numOfKeys);
+      LOG.info("Key size: {} bytes", keySize);
+      for (int i = 0; i < Integer.parseInt(numOfVolumes); i++) {
+        String volume = "vol-" + i + "-" +
+            RandomStringUtils.randomNumeric(5);
+        processor.submit(new OfflineProcessor(volume));
+      }
+    }
+    Thread validator = null;
+    if (validateWrites) {
+      totalWritesValidated = 0L;
+      writeValidationSuccessCount = 0L;
+      writeValidationFailureCount = 0L;
+
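+      // Writers enqueue keys with a non-blocking offer() into this bounded
+      // queue, so only a subset of the writes gets validated and validation
+      // never stalls the write path.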
+      validationQueue =
+          new ArrayBlockingQueue<>(Integer.parseInt(numOfThreads));
+      validator = new Thread(new Validator());
+      validator.start();
+      LOG.info("Data validation is enabled.");
+    }
+    Thread progressbar = getProgressBarThread();
+    LOG.info("Starting progress bar Thread.");
+    progressbar.start();
+    processor.shutdown();
+    processor.awaitTermination(Integer.MAX_VALUE, TimeUnit.MILLISECONDS);
+    completed = true;
+    progressbar.join();
+    if (validateWrites) {
+      validator.join();
+    }
+    ozoneClient.close();
+    return 0;
+  }
+
+  private Options getOptions() {
+    Options options = new Options();
+
+    OptionBuilder.withDescription("prints usage.");
+    Option optHelp = OptionBuilder.create(HELP);
+
+    OptionBuilder.withArgName("online | offline");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies the mode of " +
+        "Freon run.");
+    Option optMode = OptionBuilder.create(MODE);
+
+    OptionBuilder.withArgName("source url");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies the URL of s3 " +
+        "commoncrawl warc file to be used when the mode is online.");
+    Option optSource = OptionBuilder.create(SOURCE);
+
+    OptionBuilder.withDescription("do random validation of " +
+        "data written into ozone, only subset of data is validated.");
+    Option optValidateWrite = OptionBuilder.create(VALIDATE_WRITE);
+
+
+    OptionBuilder.withDescription("directory where json is created");
+    OptionBuilder.hasArg();
+    Option optJsonDir = OptionBuilder.create(JSON_WRITE_DIRECTORY);
+
+    OptionBuilder.withArgName("value");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("number of threads to be launched " +
+        "for the run");
+    Option optNumOfThreads = OptionBuilder.create(NUM_OF_THREADS);
+
+    OptionBuilder.withArgName("value");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies number of Volumes to be " +
+        "created in offline mode");
+    Option optNumOfVolumes = OptionBuilder.create(NUM_OF_VOLUMES);
+
+    OptionBuilder.withArgName("value");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies number of Buckets to be " +
+        "created per Volume in offline mode");
+    Option optNumOfBuckets = OptionBuilder.create(NUM_OF_BUCKETS);
+
+    OptionBuilder.withArgName("value");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies number of Keys to be " +
+        "created per Bucket in offline mode");
+    Option optNumOfKeys = OptionBuilder.create(NUM_OF_KEYS);
+
+    OptionBuilder.withArgName("value");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("specifies the size of Key in bytes to be " +
+        "created in offline mode");
+    Option optKeySize = OptionBuilder.create(KEY_SIZE);
+
+    OptionBuilder.withArgName(RATIS);
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("Use Ratis as the default replication " +
+        "strategy");
+    Option optRatis = OptionBuilder.create(RATIS);
+
+    options.addOption(optHelp);
+    options.addOption(optMode);
+    options.addOption(optSource);
+    options.addOption(optValidateWrite);
+    options.addOption(optJsonDir);
+    options.addOption(optNumOfThreads);
+    options.addOption(optNumOfVolumes);
+    options.addOption(optNumOfBuckets);
+    options.addOption(optNumOfKeys);
+    options.addOption(optKeySize);
+    options.addOption(optRatis);
+    return options;
+  }
+
+  private void parseOptions(CommandLine cmdLine) {
+    printUsage = cmdLine.hasOption(HELP);
+
+    mode = cmdLine.getOptionValue(MODE, MODE_DEFAULT);
+
+    source = cmdLine.getOptionValue(SOURCE, SOURCE_DEFAULT);
+
+    numOfThreads =
+        cmdLine.getOptionValue(NUM_OF_THREADS, NUM_OF_THREADS_DEFAULT);
+
+    validateWrites = cmdLine.hasOption(VALIDATE_WRITE);
+
+    jsonDir = cmdLine.getOptionValue(JSON_WRITE_DIRECTORY);
+
+    numOfVolumes =
+        cmdLine.getOptionValue(NUM_OF_VOLUMES, NUM_OF_VOLUMES_DEFAULT);
+
+    numOfBuckets =
+        cmdLine.getOptionValue(NUM_OF_BUCKETS, NUM_OF_BUCKETS_DEFAULT);
+
+    numOfKeys = cmdLine.getOptionValue(NUM_OF_KEYS, NUM_OF_KEYS_DEFAULT);
+
+    keySize = cmdLine.hasOption(KEY_SIZE) ?
+        Integer.parseInt(cmdLine.getOptionValue(KEY_SIZE)) : KEY_SIZE_DEFAULT;
+    if (keySize < 1024) {
+      throw new IllegalArgumentException(
+          "keySize can not be less than 1024 bytes");
+    }
+
+    useRatis = cmdLine.hasOption(RATIS);
+
+    type = ReplicationType.STAND_ALONE;
+    factor = ReplicationFactor.ONE;
+
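+    // The value supplied with -ratis is interpreted as the replication
+    // factor; only factors 1 and 3 are accepted.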
+    if (useRatis) {
+      type = ReplicationType.RATIS;
+      int replicationFactor = Integer.parseInt(cmdLine.getOptionValue(RATIS));
+      switch (replicationFactor) {
+      case 1:
+        factor = ReplicationFactor.ONE;
+        break;
+      case 3:
+        factor = ReplicationFactor.THREE;
+        break;
+      default:
+        throw new IllegalArgumentException("Illegal replication factor:"
+            + replicationFactor);
+      }
+    }
+  }
+
+  private void usage() {
+    System.out.println("Options supported are:");
+    System.out.println("-numOfThreads <value>           "
+        + "number of threads to be launched for the run.");
+    System.out.println("-validateWrites                 "
+        + "do random validation of data written into ozone, " +
+        "only subset of data is validated.");
+    System.out.println("-jsonDir                        "
+        + "directory where json is created.");
+    System.out.println("-mode [online | offline]        "
+        + "specifies the mode in which Freon should run.");
+    System.out.println("-source <url>                   "
+        + "specifies the URL of s3 commoncrawl warc file to " +
+        "be used when the mode is online.");
+    System.out.println("-numOfVolumes <value>           "
+        + "specifies number of Volumes to be created in offline mode");
+    System.out.println("-numOfBuckets <value>           "
+        + "specifies number of Buckets to be created per Volume " +
+        "in offline mode");
+    System.out.println("-numOfKeys <value>              "
+        + "specifies number of Keys to be created per Bucket " +
+        "in offline mode");
+    System.out.println("-keySize <value>                "
+        + "specifies the size of Key in bytes to be created in offline mode");
+    System.out.println("-help                           "
+        + "prints usage.");
+    System.out.println();
+  }
+
+  /**
+   * Adds ShutdownHook to print statistics.
+   */
+  private void addShutdownHook() {
+    Runtime.getRuntime().addShutdownHook(
+        new Thread(() -> printStats(System.out)));
+  }
+
+  private Thread getProgressBarThread() {
+    Supplier<Long> currentValue;
+    long maxValue;
+
+    if (mode.equals("online")) {
+      throw new UnsupportedOperationException("Not yet implemented.");
+    } else {
+      currentValue = () -> numberOfKeysAdded.get();
+      maxValue = Long.parseLong(numOfVolumes) *
+          Long.parseLong(numOfBuckets) *
+          Long.parseLong(numOfKeys);
+    }
+    Thread progressBarThread = new Thread(
+        new ProgressBar(System.out, currentValue, maxValue));
+    progressBarThread.setName("ProgressBar");
+    return progressBarThread;
+  }
+
+  /**
+   * Prints stats of {@link Freon} run to the PrintStream.
+   *
+   * @param out PrintStream
+   */
+  private void printStats(PrintStream out) {
+    long endTime = System.nanoTime() - startTime;
+    String execTime = DurationFormatUtils
+        .formatDuration(TimeUnit.NANOSECONDS.toMillis(endTime),
+            DURATION_FORMAT);
+
+    long volumeTime = TimeUnit.NANOSECONDS.toMillis(volumeCreationTime.get())
+        / threadPoolSize;
+    String prettyAverageVolumeTime =
+        DurationFormatUtils.formatDuration(volumeTime, DURATION_FORMAT);
+
+    long bucketTime = TimeUnit.NANOSECONDS.toMillis(bucketCreationTime.get())
+        / threadPoolSize;
+    String prettyAverageBucketTime =
+        DurationFormatUtils.formatDuration(bucketTime, DURATION_FORMAT);
+
+    long averageKeyCreationTime =
+        TimeUnit.NANOSECONDS.toMillis(keyCreationTime.get())
+            / threadPoolSize;
+    String prettyAverageKeyCreationTime = DurationFormatUtils
+        .formatDuration(averageKeyCreationTime, DURATION_FORMAT);
+
+    long averageKeyWriteTime =
+        TimeUnit.NANOSECONDS.toMillis(keyWriteTime.get()) / threadPoolSize;
+    String prettyAverageKeyWriteTime = DurationFormatUtils
+        .formatDuration(averageKeyWriteTime, DURATION_FORMAT);
+
+    out.println();
+    out.println("***************************************************");
+    out.println("Status: " + (exception ? "Failed" : "Success"));
+    out.println("Git Base Revision: " + VersionInfo.getRevision());
+    out.println("Number of Volumes created: " + numberOfVolumesCreated);
+    out.println("Number of Buckets created: " + numberOfBucketsCreated);
+    out.println("Number of Keys added: " + numberOfKeysAdded);
+    out.println("Ratis replication factor: " + factor.name());
+    out.println("Ratis replication type: " + type.name());
+    out.println(
+        "Average Time spent in volume creation: " + prettyAverageVolumeTime);
+    out.println(
+        "Average Time spent in bucket creation: " + prettyAverageBucketTime);
+    out.println(
+        "Average Time spent in key creation: " + prettyAverageKeyCreationTime);
+    out.println(
+        "Average Time spent in key write: " + prettyAverageKeyWriteTime);
+    out.println("Total bytes written: " + totalBytesWritten);
+    if (validateWrites) {
+      out.println("Total number of writes validated: " +
+          totalWritesValidated);
+      out.println("Writes validated: " +
+          (100.0 * totalWritesValidated / numberOfKeysAdded.get())
+          + " %");
+      out.println("Successful validation: " +
+          writeValidationSuccessCount);
+      out.println("Unsuccessful validation: " +
+          writeValidationFailureCount);
+    }
+    out.println("Total Execution time: " + execTime);
+    out.println("***************************************************");
+
+    if (jsonDir != null) {
+
+      String[][] quantileTime =
+          new String[FreonOps.values().length][QUANTILES + 1];
+      String[] deviations = new String[FreonOps.values().length];
+      String[] means = new String[FreonOps.values().length];
+      for (FreonOps ops : FreonOps.values()) {
+        Snapshot snapshot = histograms.get(ops.ordinal()).getSnapshot();
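+        // Record the latency at every 10% quantile (0th through 100th
+        // percentile) of this operation's histogram snapshot.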
+        for (int i = 0; i <= QUANTILES; i++) {
+          quantileTime[ops.ordinal()][i] = DurationFormatUtils.formatDuration(
+              TimeUnit.NANOSECONDS
+                  .toMillis((long) snapshot.getValue((1.0 / QUANTILES) * i)),
+              DURATION_FORMAT);
+        }
+        deviations[ops.ordinal()] = DurationFormatUtils.formatDuration(
+            TimeUnit.NANOSECONDS.toMillis((long) snapshot.getStdDev()),
+            DURATION_FORMAT);
+        means[ops.ordinal()] = DurationFormatUtils.formatDuration(
+            TimeUnit.NANOSECONDS.toMillis((long) snapshot.getMean()),
+            DURATION_FORMAT);
+      }
+
+      FreonJobInfo jobInfo = new FreonJobInfo().setExecTime(execTime)
+          .setGitBaseRevision(VersionInfo.getRevision())
+          .setMeanVolumeCreateTime(means[FreonOps.VOLUME_CREATE.ordinal()])
+          .setDeviationVolumeCreateTime(
+              deviations[FreonOps.VOLUME_CREATE.ordinal()])
+          .setTenQuantileVolumeCreateTime(
+              quantileTime[FreonOps.VOLUME_CREATE.ordinal()])
+          .setMeanBucketCreateTime(means[FreonOps.BUCKET_CREATE.ordinal()])
+          .setDeviationBucketCreateTime(
+              deviations[FreonOps.BUCKET_CREATE.ordinal()])
+          .setTenQuantileBucketCreateTime(
+              quantileTime[FreonOps.BUCKET_CREATE.ordinal()])
+          .setMeanKeyCreateTime(means[FreonOps.KEY_CREATE.ordinal()])
+          .setDeviationKeyCreateTime(deviations[FreonOps.KEY_CREATE.ordinal()])
+          .setTenQuantileKeyCreateTime(
+              quantileTime[FreonOps.KEY_CREATE.ordinal()])
+          .setMeanKeyWriteTime(means[FreonOps.KEY_WRITE.ordinal()])
+          .setDeviationKeyWriteTime(deviations[FreonOps.KEY_WRITE.ordinal()])
+          .setTenQuantileKeyWriteTime(
+              quantileTime[FreonOps.KEY_WRITE.ordinal()]);
+      String jsonName =
+          new SimpleDateFormat("yyyyMMddHHmmss").format(Time.now()) + ".json";
+      String jsonPath = jsonDir + "/" + jsonName;
+      FileOutputStream os = null;
+      try {
+        os = new FileOutputStream(jsonPath);
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.setVisibility(PropertyAccessor.FIELD,
+            JsonAutoDetect.Visibility.ANY);
+        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+        writer.writeValue(os, jobInfo);
+      } catch (FileNotFoundException e) {
+        out.println("Json File could not be created for the path: " + jsonPath);
+        out.println(e);
+      } catch (IOException e) {
+        out.println("Json object could not be created");
+        out.println(e);
+      } finally {
+        try {
+          if (os != null) {
+            os.close();
+          }
+        } catch (IOException e) {
+          LOG.warn("Could not close the output stream for json", e);
+        }
+      }
+    }
+  }
+
+  /**
+   * Returns the number of volumes created.
+   * @return volume count.
+   */
+  @VisibleForTesting
+  int getNumberOfVolumesCreated() {
+    return numberOfVolumesCreated.get();
+  }
+
+  /**
+   * Returns the number of buckets created.
+   * @return bucket count.
+   */
+  @VisibleForTesting
+  int getNumberOfBucketsCreated() {
+    return numberOfBucketsCreated.get();
+  }
+
+  /**
+   * Returns the number of keys added.
+   * @return keys count.
+   */
+  @VisibleForTesting
+  long getNumberOfKeysAdded() {
+    return numberOfKeysAdded.get();
+  }
+
+  /**
+   * Returns true if random validation of write is enabled.
+   * @return validateWrites
+   */
+  @VisibleForTesting
+  boolean getValidateWrites() {
+    return validateWrites;
+  }
+
+  /**
+   * Returns the number of keys validated.
+   * @return validated key count.
+   */
+  @VisibleForTesting
+  long getTotalKeysValidated() {
+    return totalWritesValidated;
+  }
+
+  /**
+   * Returns the number of successful validation.
+   * @return successful validation count.
+   */
+  @VisibleForTesting
+  long getSuccessfulValidationCount() {
+    return writeValidationSuccessCount;
+  }
+
+  /**
+   * Returns the number of unsuccessful validation.
+   * @return unsuccessful validation count.
+   */
+  @VisibleForTesting
+  long getUnsuccessfulValidationCount() {
+    return writeValidationFailureCount;
+  }
+
+  /**
+   * Returns the length of the common key value initialized.
+   * @return key value length initialized.
+   */
+  @VisibleForTesting
+  long getKeyValueLength() {
+    return keyValue.length;
+  }
+
+  /**
+   * Wrapper to hold ozone key-value pair.
+   */
+  private static class KeyValue {
+
+    /**
+     * Bucket name associated with the key-value.
+     */
+    private OzoneBucket bucket;
+    /**
+     * Key name associated with the key-value.
+     */
+    private String key;
+    /**
+     * Value associated with the key-value.
+     */
+    private byte[] value;
+
+    /**
+     * Constructs a new ozone key-value pair.
+     *
+     * @param bucket bucket the key belongs to
+     * @param key    key part
+     * @param value  value part
+     */
+    KeyValue(OzoneBucket bucket, String key, byte[] value) {
+      this.bucket = bucket;
+      this.key = key;
+      this.value = value;
+    }
+  }
+
+  private class OfflineProcessor implements Runnable {
+
+    private int totalBuckets;
+    private int totalKeys;
+    private String volumeName;
+
+    OfflineProcessor(String volumeName) {
+      this.totalBuckets = Integer.parseInt(numOfBuckets);
+      this.totalKeys = Integer.parseInt(numOfKeys);
+      this.volumeName = volumeName;
+    }
+
+    @Override
+    public void run() {
+      LOG.trace("Creating volume: {}", volumeName);
+      long start = System.nanoTime();
+      OzoneVolume volume;
+      try {
+        objectStore.createVolume(volumeName);
+        long volumeCreationDuration = System.nanoTime() - start;
+        volumeCreationTime.getAndAdd(volumeCreationDuration);
+        histograms.get(FreonOps.VOLUME_CREATE.ordinal())
+            .update(volumeCreationDuration);
+        numberOfVolumesCreated.getAndIncrement();
+        volume = objectStore.getVolume(volumeName);
+      } catch (IOException e) {
+        exception = true;
+        LOG.error("Could not create volume", e);
+        return;
+      }
+
+      Long threadKeyWriteTime = 0L;
+      for (int j = 0; j < totalBuckets; j++) {
+        String bucketName = "bucket-" + j + "-" +
+            RandomStringUtils.randomNumeric(5);
+        try {
+          LOG.trace("Creating bucket: {} in volume: {}",
+              bucketName, volume.getName());
+          start = System.nanoTime();
+          volume.createBucket(bucketName);
+          long bucketCreationDuration = System.nanoTime() - start;
+          histograms.get(FreonOps.BUCKET_CREATE.ordinal())
+              .update(bucketCreationDuration);
+          bucketCreationTime.getAndAdd(bucketCreationDuration);
+          numberOfBucketsCreated.getAndIncrement();
+          OzoneBucket bucket = volume.getBucket(bucketName);
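+          // For each key, the createKey call and the subsequent write are
+          // timed separately so the report can break down where the time
+          // goes.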
+          for (int k = 0; k < totalKeys; k++) {
+            String key = "key-" + k + "-" +
+                RandomStringUtils.randomNumeric(5);
+            byte[] randomValue =
+                DFSUtil.string2Bytes(UUID.randomUUID().toString());
+            try {
+              LOG.trace("Adding key: {} in bucket: {} of volume: {}",
+                  key, bucket, volume);
+              long keyCreateStart = System.nanoTime();
+              OzoneOutputStream os =
+                  bucket.createKey(key, keySize, type, factor);
+              long keyCreationDuration = System.nanoTime() - keyCreateStart;
+              histograms.get(FreonOps.KEY_CREATE.ordinal())
+                  .update(keyCreationDuration);
+              keyCreationTime.getAndAdd(keyCreationDuration);
+              long keyWriteStart = System.nanoTime();
+              os.write(keyValue);
+              os.write(randomValue);
+              os.close();
+              long keyWriteDuration = System.nanoTime() - keyWriteStart;
+              threadKeyWriteTime += keyWriteDuration;
+              histograms.get(FreonOps.KEY_WRITE.ordinal())
+                  .update(keyWriteDuration);
+              totalBytesWritten.getAndAdd(keySize);
+              numberOfKeysAdded.getAndIncrement();
+              if (validateWrites) {
+                byte[] value = ArrayUtils.addAll(keyValue, randomValue);
+                boolean validate = validationQueue.offer(
+                    new KeyValue(bucket, key, value));
+                if (validate) {
+                  LOG.trace("Key {}, is queued for validation.", key);
+                }
+              }
+            } catch (Exception e) {
+              exception = true;
+              LOG.error("Exception while adding key: {} in bucket: {}" +
+                  " of volume: {}.", key, bucket, volume, e);
+            }
+          }
+        } catch (Exception e) {
+          exception = true;
+          LOG.error("Exception while creating bucket: {}" +
+              " in volume: {}.", bucketName, volume, e);
+        }
+      }
+
+      keyWriteTime.getAndAdd(threadKeyWriteTime);
+    }
+
+  }
+
+  private final class FreonJobInfo {
+
+    private String status;
+    private String gitBaseRevision;
+    private String jobStartTime;
+    private String numOfVolumes;
+    private String numOfBuckets;
+    private String numOfKeys;
+    private String numOfThreads;
+    private String mode;
+    private String dataWritten;
+    private String execTime;
+    private String replicationFactor;
+    private String replicationType;
+
+    private int keySize;
+
+    private String totalThroughputPerSecond;
+
+    private String meanVolumeCreateTime;
+    private String deviationVolumeCreateTime;
+    private String[] tenQuantileVolumeCreateTime;
+
+    private String meanBucketCreateTime;
+    private String deviationBucketCreateTime;
+    private String[] tenQuantileBucketCreateTime;
+
+    private String meanKeyCreateTime;
+    private String deviationKeyCreateTime;
+    private String[] tenQuantileKeyCreateTime;
+
+    private String meanKeyWriteTime;
+    private String deviationKeyWriteTime;
+    private String[] tenQuantileKeyWriteTime;
+
+    private FreonJobInfo() {
+      this.status = exception ? "Failed" : "Success";
+      this.numOfVolumes = Freon.this.numOfVolumes;
+      this.numOfBuckets = Freon.this.numOfBuckets;
+      this.numOfKeys = Freon.this.numOfKeys;
+      this.numOfThreads = Freon.this.numOfThreads;
+      this.keySize = Freon.this.keySize;
+      this.mode = Freon.this.mode;
+      this.jobStartTime = Time.formatTime(Freon.this.jobStartTime);
+      this.replicationFactor = Freon.this.factor.name();
+      this.replicationType = Freon.this.type.name();
+
+      long totalBytes =
+          Long.parseLong(numOfVolumes) * Long.parseLong(numOfBuckets) * Long
+              .parseLong(numOfKeys) * keySize;
+      this.dataWritten = getInStorageUnits((double) totalBytes);
+      this.totalThroughputPerSecond = getInStorageUnits(
+          (totalBytes * 1.0) / TimeUnit.NANOSECONDS
+              .toSeconds(Freon.this.keyWriteTime.get() / threadPoolSize));
+    }
+
+    private String getInStorageUnits(Double value) {
+      double size;
+      OzoneQuota.Units unit;
+      if ((long) (value / OzoneConsts.TB) != 0) {
+        size = value / OzoneConsts.TB;
+        unit = OzoneQuota.Units.TB;
+      } else if ((long) (value / OzoneConsts.GB) != 0) {
+        size = value / OzoneConsts.GB;
+        unit = OzoneQuota.Units.GB;
+      } else if ((long) (value / OzoneConsts.MB) != 0) {
+        size = value / OzoneConsts.MB;
+        unit = OzoneQuota.Units.MB;
+      } else if ((long) (value / OzoneConsts.KB) != 0) {
+        size = value / OzoneConsts.KB;
+        unit = OzoneQuota.Units.KB;
+      } else {
+        size = value;
+        unit = OzoneQuota.Units.BYTES;
+      }
+      return size + " " + unit;
+    }
+
+    public FreonJobInfo setGitBaseRevision(String gitBaseRevisionVal) {
+      gitBaseRevision = gitBaseRevisionVal;
+      return this;
+    }
+
+    public FreonJobInfo setExecTime(String execTimeVal) {
+      execTime = execTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setMeanKeyWriteTime(String meanKeyWriteTimeVal) {
+      this.meanKeyWriteTime = meanKeyWriteTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setDeviationKeyWriteTime(
+        String deviationKeyWriteTimeVal) {
+      this.deviationKeyWriteTime = deviationKeyWriteTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setTenQuantileKeyWriteTime(
+        String[] tenQuantileKeyWriteTimeVal) {
+      this.tenQuantileKeyWriteTime = tenQuantileKeyWriteTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setMeanKeyCreateTime(String meanKeyCreateTimeVal) {
+      this.meanKeyCreateTime = meanKeyCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setDeviationKeyCreateTime(
+        String deviationKeyCreateTimeVal) {
+      this.deviationKeyCreateTime = deviationKeyCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setTenQuantileKeyCreateTime(
+        String[] tenQuantileKeyCreateTimeVal) {
+      this.tenQuantileKeyCreateTime = tenQuantileKeyCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setMeanBucketCreateTime(
+        String meanBucketCreateTimeVal) {
+      this.meanBucketCreateTime = meanBucketCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setDeviationBucketCreateTime(
+        String deviationBucketCreateTimeVal) {
+      this.deviationBucketCreateTime = deviationBucketCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setTenQuantileBucketCreateTime(
+        String[] tenQuantileBucketCreateTimeVal) {
+      this.tenQuantileBucketCreateTime = tenQuantileBucketCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setMeanVolumeCreateTime(
+        String meanVolumeCreateTimeVal) {
+      this.meanVolumeCreateTime = meanVolumeCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setDeviationVolumeCreateTime(
+        String deviationVolumeCreateTimeVal) {
+      this.deviationVolumeCreateTime = deviationVolumeCreateTimeVal;
+      return this;
+    }
+
+    public FreonJobInfo setTenQuantileVolumeCreateTime(
+        String[] tenQuantileVolumeCreateTimeVal) {
+      this.tenQuantileVolumeCreateTime = tenQuantileVolumeCreateTimeVal;
+      return this;
+    }
+
+    public String getJobStartTime() {
+      return jobStartTime;
+    }
+
+    public String getNumOfVolumes() {
+      return numOfVolumes;
+    }
+
+    public String getNumOfBuckets() {
+      return numOfBuckets;
+    }
+
+    public String getNumOfKeys() {
+      return numOfKeys;
+    }
+
+    public String getNumOfThreads() {
+      return numOfThreads;
+    }
+
+    public String getMode() {
+      return mode;
+    }
+
+    public String getExecTime() {
+      return execTime;
+    }
+
+    public String getReplicationFactor() {
+      return replicationFactor;
+    }
+
+    public String getReplicationType() {
+      return replicationType;
+    }
+
+    public String getStatus() {
+      return status;
+    }
+
+    public int getKeySize() {
+      return keySize;
+    }
+
+    public String getGitBaseRevision() {
+      return gitBaseRevision;
+    }
+
+    public String getDataWritten() {
+      return dataWritten;
+    }
+
+    public String getTotalThroughputPerSecond() {
+      return totalThroughputPerSecond;
+    }
+
+    public String getMeanVolumeCreateTime() {
+      return meanVolumeCreateTime;
+    }
+
+    public String getDeviationVolumeCreateTime() {
+      return deviationVolumeCreateTime;
+    }
+
+    public String[] getTenQuantileVolumeCreateTime() {
+      return tenQuantileVolumeCreateTime;
+    }
+
+    public String getMeanBucketCreateTime() {
+      return meanBucketCreateTime;
+    }
+
+    public String getDeviationBucketCreateTime() {
+      return deviationBucketCreateTime;
+    }
+
+    public String[] getTenQuantileBucketCreateTime() {
+      return tenQuantileBucketCreateTime;
+    }
+
+    public String getMeanKeyCreateTime() {
+      return meanKeyCreateTime;
+    }
+
+    public String getDeviationKeyCreateTime() {
+      return deviationKeyCreateTime;
+    }
+
+    public String[] getTenQuantileKeyCreateTime() {
+      return tenQuantileKeyCreateTime;
+    }
+
+    public String getMeanKeyWriteTime() {
+      return meanKeyWriteTime;
+    }
+
+    public String getDeviationKeyWriteTime() {
+      return deviationKeyWriteTime;
+    }
+
+    public String[] getTenQuantileKeyWriteTime() {
+      return tenQuantileKeyWriteTime;
+    }
+  }
+
+  private class ProgressBar implements Runnable {
+
+    private static final long REFRESH_INTERVAL = 1000L;
+
+    private PrintStream stream;
+    private Supplier<Long> currentValue;
+    private long maxValue;
+
+    ProgressBar(PrintStream stream, Supplier<Long> currentValue,
+        long maxValue) {
+      this.stream = stream;
+      this.currentValue = currentValue;
+      this.maxValue = maxValue;
+    }
+
+    @Override
+    public void run() {
+      try {
+        stream.println();
+        long value;
+        while ((value = currentValue.get()) < maxValue) {
+          print(value);
+          if (completed) {
+            break;
+          }
+          Thread.sleep(REFRESH_INTERVAL);
+        }
+        if (exception) {
+          stream.println();
+          stream.println("Incomplete termination, " +
+              "check log for exception.");
+        } else {
+          print(maxValue);
+        }
+        stream.println();
+      } catch (InterruptedException e) {
+        // Best-effort progress reporting; restore the interrupt status and
+        // let the thread exit quietly.
+        Thread.currentThread().interrupt();
+      }
+    }
+
+    /**
+     * Prints the progress bar for the given current value.
+     *
+     * @param value current progress value
+     */
+    private void print(long value) {
+      stream.print('\r');
+      double percent = 100.0 * value / maxValue;
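+      // Render roughly one block character per completed percent, pad the
+      // remainder with spaces, then append the absolute count and the
+      // elapsed time.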
+      StringBuilder sb = new StringBuilder();
+      sb.append(" " + String.format("%.2f", percent) + "% |");
+
+      for (int i = 0; i <= percent; i++) {
+        sb.append('█');
+      }
+      for (int j = 0; j < 100 - percent; j++) {
+        sb.append(' ');
+      }
+      sb.append("|  ");
+      sb.append(value + "/" + maxValue);
+      long timeInSec = TimeUnit.SECONDS.convert(
+          System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
+      String timeToPrint = String.format("%d:%02d:%02d", timeInSec / 3600,
+          (timeInSec % 3600) / 60, timeInSec % 60);
+      sb.append(" Time: " + timeToPrint);
+      stream.print(sb);
+    }
+  }
+
+  /**
+   * Validates the write done in ozone cluster.
+   */
+  private class Validator implements Runnable {
+
+    @Override
+    public void run() {
+      while (!completed) {
+        try {
+          KeyValue kv = validationQueue.poll(5, TimeUnit.SECONDS);
+          if (kv != null) {
+
+            OzoneInputStream is = kv.bucket.readKey(kv.key);
+            byte[] value = new byte[kv.value.length];
+            int length = is.read(value);
+            totalWritesValidated++;
+            if (length == kv.value.length && Arrays.equals(value, kv.value)) {
+              writeValidationSuccessCount++;
+            } else {
+              writeValidationFailureCount++;
+              LOG.warn("Data validation error for key {}/{}/{}",
+                  kv.bucket.getVolumeName(), kv.bucket, kv.key);
+              LOG.warn("Expected: {}, Actual: {}",
+                  DFSUtil.bytes2String(kv.value),
+                  DFSUtil.bytes2String(value));
+            }
+          }
+        } catch (IOException | InterruptedException ex) {
+          LOG.error("Exception while validating write: " + ex.getMessage());
+        }
+      }
+    }
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/package-info.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/package-info.java
new file mode 100644
index 0000000..3ef9123
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.freon;
+
+/**
+ * This package contains classes used for testing and benchmarking an Ozone
+ * cluster.
+ */
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
new file mode 100644
index 0000000..70d80d5
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.genesis;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.states.ContainerStateMap;
+import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import org.apache.hadoop.util.Time;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.util.UUID;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Objects;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.OPEN;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED;
+
+@State(Scope.Thread)
+public class BenchMarkContainerStateMap {
+  private ContainerStateMap stateMap;
+  private AtomicInteger containerID;
+
+  @Setup(Level.Trial)
+  public void initialize() throws IOException {
+    stateMap = new ContainerStateMap();
+    Pipeline pipeline = createSingleNodePipeline(UUID.randomUUID().toString());
+    Preconditions.checkNotNull(pipeline, "Pipeline cannot be null.");
+    int currentCount = 1;
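+    // Pre-populate the state map with roughly a thousand CLOSED and a
+    // thousand OPEN containers so the benchmarked operations run against a
+    // realistically sized map.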
+    for (int x = 1; x < 1000; x++) {
+      try {
+        ContainerInfo containerInfo = new ContainerInfo.Builder()
+            .setContainerName(pipeline.getContainerName()).setState(CLOSED)
+            .setPipeline(pipeline)
+            // This is bytes allocated for blocks inside container, not the
+            // container size
+            .setAllocatedBytes(0).setUsedBytes(0).setNumberOfKeys(0)
+            .setStateEnterTime(Time.monotonicNow()).setOwner("OZONE")
+            .setContainerID(x).build();
+        stateMap.addContainer(containerInfo);
+        currentCount++;
+      } catch (SCMException e) {
+        e.printStackTrace();
+      }
+    }
+    for (int y = currentCount; y < 2000; y++) {
+      try {
+        ContainerInfo containerInfo = new ContainerInfo.Builder()
+            .setContainerName(pipeline.getContainerName()).setState(OPEN)
+            .setPipeline(pipeline)
+            // This is bytes allocated for blocks inside container, not the
+            // container size
+            .setAllocatedBytes(0).setUsedBytes(0).setNumberOfKeys(0)
+            .setStateEnterTime(Time.monotonicNow()).setOwner("OZONE")
+            .setContainerID(y).build();
+        stateMap.addContainer(containerInfo);
+        currentCount++;
+      } catch (SCMException e) {
+        e.printStackTrace();
+      }
+    }
+    try {
+      ContainerInfo containerInfo = new ContainerInfo.Builder()
+          .setContainerName(pipeline.getContainerName()).setState(OPEN)
+          .setPipeline(pipeline)
+          // This is bytes allocated for blocks inside container, not the
+          // container size
+          .setAllocatedBytes(0).setUsedBytes(0).setNumberOfKeys(0)
+          .setStateEnterTime(Time.monotonicNow()).setOwner("OZONE")
+          .setContainerID(currentCount++).build();
+      stateMap.addContainer(containerInfo);
+    } catch (SCMException e) {
+      e.printStackTrace();
+    }
+
+    containerID = new AtomicInteger(currentCount++);
+
+  }
+
+  public static Pipeline createSingleNodePipeline(String containerName)
+      throws IOException {
+    return createPipeline(containerName, 1);
+  }
+
+  /**
+   * Create a pipeline with the given number of datanode replicas.
+   *
+   * @param containerName name of the container.
+   * @param numNodes number of datanodes in the pipeline.
+   * @return Pipeline with numNodes datanodes in it.
+   * @throws IOException if the pipeline cannot be created.
+   */
+  public static Pipeline createPipeline(String containerName, int numNodes)
+      throws IOException {
+    Preconditions.checkArgument(numNodes >= 1);
+    final List<DatanodeDetails> ids = new ArrayList<>(numNodes);
+    for (int i = 0; i < numNodes; i++) {
+      ids.add(GenesisUtil.createDatanodeDetails(UUID.randomUUID().toString()));
+    }
+    return createPipeline(containerName, ids);
+  }
+
+  public static Pipeline createPipeline(String containerName,
+      Iterable<DatanodeDetails> ids) throws IOException {
+    Objects.requireNonNull(ids, "ids == null");
+    final Iterator<DatanodeDetails> i = ids.iterator();
+    Preconditions.checkArgument(i.hasNext());
+    final DatanodeDetails leader = i.next();
+    String pipelineName = "TEST-" + UUID.randomUUID().toString().substring(5);
+    final PipelineChannel pipelineChannel =
+        new PipelineChannel(leader.getUuidString(), OPEN,
+            ReplicationType.STAND_ALONE, ReplicationFactor.ONE, pipelineName);
+    pipelineChannel.addMember(leader);
+    while (i.hasNext()) {
+      pipelineChannel.addMember(i.next());
+    }
+    return new Pipeline(containerName, pipelineChannel);
+  }
+
+  @Benchmark
+  public void createContainerBenchMark(BenchMarkContainerStateMap state,
+      Blackhole bh) throws IOException {
+    Pipeline pipeline = createSingleNodePipeline(UUID.randomUUID().toString());
+    int cid = state.containerID.incrementAndGet();
+    ContainerInfo containerInfo = new ContainerInfo.Builder()
+        .setContainerName(pipeline.getContainerName()).setState(CLOSED)
+        .setPipeline(pipeline)
+        // This is bytes allocated for blocks inside container, not the
+        // container size
+        .setAllocatedBytes(0).setUsedBytes(0).setNumberOfKeys(0)
+        .setStateEnterTime(Time.monotonicNow()).setOwner("OZONE")
+        .setContainerID(cid).build();
+    state.stateMap.addContainer(containerInfo);
+  }
+
+  @Benchmark
+  public void getMatchingContainerBenchMark(BenchMarkContainerStateMap state,
+      Blackhole bh) {
+    bh.consume(state.stateMap
+        .getMatchingContainerIDs(OPEN, "BILBO", ReplicationFactor.ONE,
+            ReplicationType.STAND_ALONE));
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java
new file mode 100644
index 0000000..468fee5
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkDatanodeDispatcher.java
@@ -0,0 +1,280 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.genesis;
+
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl;
+import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
+import org.apache.hadoop.ozone.container.common.impl.Dispatcher;
+import org.apache.hadoop.ozone.container.common.impl.KeyManagerImpl;
+import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager;
+
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX;
+
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.CreateContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ReadChunkRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.WriteChunkRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+
+@State(Scope.Benchmark)
+public class BenchMarkDatanodeDispatcher {
+
+  private String baseDir;
+  private String datanodeUuid;
+  private Dispatcher dispatcher;
+  private PipelineChannel pipelineChannel;
+  private ByteString data;
+  private Random random;
+  private AtomicInteger containerCount;
+  private AtomicInteger keyCount;
+  private AtomicInteger chunkCount;
+
+  @Setup(Level.Trial)
+  public void initialize() throws IOException {
+    datanodeUuid = UUID.randomUUID().toString();
+    pipelineChannel = new PipelineChannel("127.0.0.1",
+        LifeCycleState.OPEN, ReplicationType.STAND_ALONE,
+        ReplicationFactor.ONE, "SA-" + UUID.randomUUID());
+
+    // 1 MB of data
+    data = ByteString.copyFromUtf8(RandomStringUtils.randomAscii(1048576));
+    random  = new Random();
+    Configuration conf = new OzoneConfiguration();
+    ContainerManager manager = new ContainerManagerImpl();
+    baseDir = System.getProperty("java.io.tmpdir") + File.separator +
+        datanodeUuid;
+
+    // data directory
+    conf.set("dfs.datanode.data.dir", baseDir + File.separator + "data");
+
+    // metadata directory
+    StorageLocation metadataDir = StorageLocation.parse(
+        baseDir+ File.separator + CONTAINER_ROOT_PREFIX);
+    List<StorageLocation> locations = Arrays.asList(metadataDir);
+
+    manager
+        .init(conf, locations, GenesisUtil.createDatanodeDetails(datanodeUuid));
+    manager.setChunkManager(new ChunkManagerImpl(manager));
+    manager.setKeyManager(new KeyManagerImpl(manager, conf));
+
+    dispatcher = new Dispatcher(manager, conf);
+    dispatcher.init();
+
+    containerCount = new AtomicInteger();
+    keyCount = new AtomicInteger();
+    chunkCount = new AtomicInteger();
+
+    // Create containers
+    for (int x = 0; x < 100; x++) {
+      String containerName = "container-" + containerCount.getAndIncrement();
+      dispatcher.dispatch(getCreateContainerCommand(containerName));
+    }
+    // Add chunk and keys to the containers
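+    // Each of the 50 chunk/key pairs is written to every one of the 100
+    // containers created above, so the dispatcher starts the benchmarks
+    // with 5000 chunks and 5000 keys already in place.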
+    for (int x = 0; x < 50; x++) {
+      String chunkName = "chunk-" + chunkCount.getAndIncrement();
+      String keyName = "key-" + keyCount.getAndIncrement();
+      for (int y = 0; y < 100; y++) {
+        String containerName = "container-" + y;
+        dispatcher.dispatch(getWriteChunkCommand(containerName, chunkName));
+        dispatcher
+            .dispatch(getPutKeyCommand(containerName, chunkName, keyName));
+      }
+    }
+  }
+
+  @TearDown(Level.Trial)
+  public void cleanup() throws IOException {
+    dispatcher.shutdown();
+    FileUtils.deleteDirectory(new File(baseDir));
+  }
+
+  private ContainerCommandRequestProto getCreateContainerCommand(
+      String containerName) {
+    CreateContainerRequestProto.Builder createRequest =
+        CreateContainerRequestProto.newBuilder();
+    createRequest.setPipeline(
+        new Pipeline(containerName, pipelineChannel).getProtobufMessage());
+    createRequest.setContainerData(
+        ContainerData.newBuilder().setName(containerName).build());
+
+    ContainerCommandRequestProto.Builder request =
+        ContainerCommandRequestProto.newBuilder();
+    request.setCmdType(ContainerProtos.Type.CreateContainer);
+    request.setCreateContainer(createRequest);
+    request.setDatanodeUuid(datanodeUuid);
+    request.setTraceID(containerName + "-trace");
+    return request.build();
+  }
+
+  private ContainerCommandRequestProto getWriteChunkCommand(
+      String containerName, String key) {
+
+    WriteChunkRequestProto.Builder writeChunkRequest = WriteChunkRequestProto
+        .newBuilder()
+        .setPipeline(
+            new Pipeline(containerName, pipelineChannel).getProtobufMessage())
+        .setKeyName(key)
+        .setChunkData(getChunkInfo(containerName, key))
+        .setData(data);
+
+    ContainerCommandRequestProto.Builder request = ContainerCommandRequestProto
+        .newBuilder();
+    request.setCmdType(ContainerProtos.Type.WriteChunk)
+        .setTraceID(containerName + "-" + key +"-trace")
+        .setDatanodeUuid(datanodeUuid)
+        .setWriteChunk(writeChunkRequest);
+    return request.build();
+  }
+
+  private ContainerCommandRequestProto getReadChunkCommand(
+      String containerName, String key) {
+    ReadChunkRequestProto.Builder readChunkRequest = ReadChunkRequestProto
+        .newBuilder()
+        .setPipeline(
+            new Pipeline(containerName, pipelineChannel).getProtobufMessage())
+        .setKeyName(key)
+        .setChunkData(getChunkInfo(containerName, key));
+    ContainerCommandRequestProto.Builder request = ContainerCommandRequestProto
+        .newBuilder();
+    request.setCmdType(ContainerProtos.Type.ReadChunk)
+        .setTraceID(containerName + "-" + key +"-trace")
+        .setDatanodeUuid(datanodeUuid)
+        .setReadChunk(readChunkRequest);
+    return request.build();
+  }
+
+  private ContainerProtos.ChunkInfo getChunkInfo(
+      String containerName, String key) {
+    ContainerProtos.ChunkInfo.Builder builder =
+        ContainerProtos.ChunkInfo.newBuilder()
+            .setChunkName(
+                DigestUtils.md5Hex(key) + "_stream_" + containerName + "_chunk_"
+                    + key)
+            .setOffset(0).setLen(data.size());
+    return builder.build();
+  }
+
+  private ContainerCommandRequestProto getPutKeyCommand(
+      String containerName, String chunkKey, String key) {
+    PutKeyRequestProto.Builder putKeyRequest = PutKeyRequestProto
+        .newBuilder()
+        .setPipeline(
+            new Pipeline(containerName, pipelineChannel).getProtobufMessage())
+        .setKeyData(getKeyData(containerName, chunkKey, key));
+    ContainerCommandRequestProto.Builder request = ContainerCommandRequestProto
+        .newBuilder();
+    request.setCmdType(ContainerProtos.Type.PutKey)
+        .setTraceID(containerName + "-" + key +"-trace")
+        .setDatanodeUuid(datanodeUuid)
+        .setPutKey(putKeyRequest);
+    return request.build();
+  }
+
+  private ContainerCommandRequestProto getGetKeyCommand(
+      String containerName, String chunkKey, String key) {
+    GetKeyRequestProto.Builder readKeyRequest = GetKeyRequestProto.newBuilder()
+        .setPipeline(
+            new Pipeline(containerName, pipelineChannel).getProtobufMessage())
+        .setKeyData(getKeyData(containerName, chunkKey, key));
+    ContainerCommandRequestProto.Builder request = ContainerCommandRequestProto
+        .newBuilder()
+        .setCmdType(ContainerProtos.Type.GetKey)
+        .setTraceID(containerName + "-" + key +"-trace")
+        .setDatanodeUuid(datanodeUuid)
+        .setGetKey(readKeyRequest);
+    return request.build();
+  }
+
+  private ContainerProtos.KeyData getKeyData(
+      String containerName, String chunkKey, String key) {
+    ContainerProtos.KeyData.Builder builder =  ContainerProtos.KeyData
+        .newBuilder()
+        .setContainerName(containerName)
+        .setName(key)
+        .addChunks(getChunkInfo(containerName, chunkKey));
+    return builder.build();
+  }
+
+  @Benchmark
+  public void createContainer(BenchMarkDatanodeDispatcher bmdd) {
+    bmdd.dispatcher.dispatch(getCreateContainerCommand(
+        "container-" + containerCount.getAndIncrement()));
+  }
+
+
+  @Benchmark
+  public void writeChunk(BenchMarkDatanodeDispatcher bmdd) {
+    String containerName = "container-" + random.nextInt(containerCount.get());
+    bmdd.dispatcher.dispatch(getWriteChunkCommand(
+        containerName, "chunk-" + chunkCount.getAndIncrement()));
+  }
+
+  @Benchmark
+  public void readChunk(BenchMarkDatanodeDispatcher bmdd) {
+    String containerName = "container-" + random.nextInt(containerCount.get());
+    String chunkKey = "chunk-" + random.nextInt(chunkCount.get());
+    bmdd.dispatcher.dispatch(getReadChunkCommand(containerName, chunkKey));
+  }
+
+  @Benchmark
+  public void putKey(BenchMarkDatanodeDispatcher bmdd) {
+    String containerName = "container-" + random.nextInt(containerCount.get());
+    String chunkKey = "chunk-" + random.nextInt(chunkCount.get());
+    bmdd.dispatcher.dispatch(getPutKeyCommand(
+        containerName, chunkKey, "key-" + keyCount.getAndIncrement()));
+  }
+
+  @Benchmark
+  public void getKey(BenchMarkDatanodeDispatcher bmdd) {
+    String containerName = "container-" + random.nextInt(containerCount.get());
+    String chunkKey = "chunk-" + random.nextInt(chunkCount.get());
+    String key = "key-" + random.nextInt(keyCount.get());
+    bmdd.dispatcher.dispatch(getGetKeyCommand(containerName, chunkKey, key));
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java
new file mode 100644
index 0000000..fc3dbcb
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.genesis;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.utils.MetadataStore;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_10MB_TYPE;
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_1GB_TYPE;
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.CLOSED_TYPE;
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.DEFAULT_TYPE;
+
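+/**
+ * JMH benchmark that measures random read performance of the different
+ * {@link MetadataStore} configurations after pre-populating the store.
+ */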
+@State(Scope.Thread)
+public class BenchMarkMetadataStoreReads {
+
+  private static final int DATA_LEN = 1024;
+  private static final long MAX_KEYS = 1024 * 10;
+
+  private MetadataStore store;
+
+  @Param({DEFAULT_TYPE, CACHE_10MB_TYPE, CACHE_1GB_TYPE, CLOSED_TYPE})
+  private String type;
+
+  @Setup
+  public void initialize() throws IOException {
+    store = GenesisUtil.getMetadataStore(this.type);
+    byte[] data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
+        .getBytes(Charset.forName("UTF-8"));
+    for (int x = 0; x < MAX_KEYS; x++) {
+      store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+    }
+    if (CLOSED_TYPE.equals(type)) {
+      store.compactDB();
+    }
+  }
+
+  @Benchmark
+  public void test(Blackhole bh) throws IOException {
+    long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
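+    // Feed the value to the Blackhole so the JIT cannot eliminate the read.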
+    bh.consume(
+        store.get(Long.toHexString(x).getBytes(Charset.forName("UTF-8"))));
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java
new file mode 100644
index 0000000..f496a7d
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.genesis;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.utils.MetadataStore;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_10MB_TYPE;
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_1GB_TYPE;
+import static org.apache.hadoop.ozone.genesis.GenesisUtil.DEFAULT_TYPE;
+
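+/**
+ * JMH benchmark that measures random write performance of the different
+ * {@link MetadataStore} configurations.
+ */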
+@State(Scope.Thread)
+public class BenchMarkMetadataStoreWrites {
+
+  private static final int DATA_LEN = 1024;
+  private static final long MAX_KEYS = 1024 * 10;
+
+  private MetadataStore store;
+  private byte[] data;
+
+  @Param({DEFAULT_TYPE, CACHE_10MB_TYPE, CACHE_1GB_TYPE})
+  private String type;
+
+  @Setup
+  public void initialize() throws IOException {
+    data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
+        .getBytes(Charset.forName("UTF-8"));
+    store = GenesisUtil.getMetadataStore(this.type);
+  }
+
+  @Benchmark
+  public void test() throws IOException {
+    long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
+    store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java
new file mode 100644
index 0000000..0890e4b
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+package org.apache.hadoop.ozone.genesis;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.StorageUnit;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.RocksDBStore;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
+import org.openjdk.jmh.infra.Blackhole;
+import org.rocksdb.BlockBasedTableConfig;
+import org.rocksdb.CompactionStyle;
+import org.rocksdb.IndexType;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.file.Paths;
+
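+/**
+ * JMH benchmark that exercises a {@link RocksDBStore} through a combined
+ * put/get workload, with several RocksDB tuning knobs (block size, write
+ * buffers, background flushes) exposed as JMH parameters.
+ */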
+@State(Scope.Thread)
+public class BenchMarkRocksDbStore {
+  private static final int DATA_LEN = 1024;
+  private static final long MAX_KEYS = 1024 * 10;
+  private static final int DB_FILE_LEN = 7;
+  private static final String TMP_DIR = "java.io.tmpdir";
+
+  private MetadataStore store;
+  private byte[] data;
+  private File dbFile;
+
+  @Param(value = {"8"})
+  private String blockSize; // 4KB default
+
+  @Param(value = {"64"})
+  private String writeBufferSize; //64 MB default
+
+  @Param(value = {"16"})
+  private String maxWriteBufferNumber; // 2 default
+
+  @Param(value = {"4"})
+  private String maxBackgroundFlushes; // 1 default
+
+  @Param(value = {"512"})
+  private String maxBytesForLevelBase;
+
+  @Param(value = {"4"})
+  private String backgroundThreads;
+
+  @Param(value = {"5000"})
+  private String maxOpenFiles;
+
+  @Setup(Level.Trial)
+  public void initialize() throws IOException {
+    data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
+        .getBytes(Charset.forName("UTF-8"));
+    org.rocksdb.Options opts = new org.rocksdb.Options();
+    dbFile = Paths.get(System.getProperty(TMP_DIR))
+        .resolve(RandomStringUtils.randomNumeric(DB_FILE_LEN))
+        .toFile();
+    opts.setCreateIfMissing(true);
+    opts.setWriteBufferSize(
+        (long) StorageUnit.MB.toBytes(Long.valueOf(writeBufferSize)));
+    opts.setMaxWriteBufferNumber(Integer.valueOf(maxWriteBufferNumber));
+    opts.setMaxBackgroundFlushes(Integer.valueOf(maxBackgroundFlushes));
+    BlockBasedTableConfig tableConfig = new BlockBasedTableConfig();
+    tableConfig.setBlockSize(
+        (long) StorageUnit.KB.toBytes(Long.valueOf(blockSize)));
+    opts.setMaxOpenFiles(Integer.valueOf(maxOpenFiles));
+    opts.setMaxBytesForLevelBase(
+        (long) StorageUnit.MB.toBytes(Long.valueOf(maxBytesForLevelBase)));
+    opts.setCompactionStyle(CompactionStyle.UNIVERSAL);
+    opts.setLevel0FileNumCompactionTrigger(10);
+    opts.setLevel0SlowdownWritesTrigger(20);
+    opts.setLevel0StopWritesTrigger(40);
+    opts.setTargetFileSizeBase(
+        (long) StorageUnit.MB.toBytes(Long.valueOf(maxBytesForLevelBase)) / 10);
+    opts.setMaxBackgroundCompactions(8);
+    opts.setUseFsync(false);
+    opts.setBytesPerSync(8388608);
+    org.rocksdb.Filter bloomFilter = new org.rocksdb.BloomFilter(20);
+    tableConfig.setCacheIndexAndFilterBlocks(true);
+    tableConfig.setIndexType(IndexType.kHashSearch);
+    tableConfig.setFilter(bloomFilter);
+    opts.setTableFormatConfig(tableConfig);
+    opts.useCappedPrefixExtractor(4);
+    store = new RocksDBStore(dbFile, opts);
+  }
+
+  @TearDown(Level.Trial)
+  public void cleanup() throws IOException {
+    store.destroy();
+    // TMP_DIR is only the name of the system property, so delete the actual
+    // benchmark DB directory rather than a literal "java.io.tmpdir" path.
+    FileUtils.deleteDirectory(dbFile);
+  }
+
+  @Benchmark
+  public void test(Blackhole bh) throws IOException {
+    long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
+    store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+    bh.consume(
+        store.get(Long.toHexString(x).getBytes(Charset.forName("UTF-8"))));
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java
new file mode 100644
index 0000000..5efa12a
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/Genesis.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.ozone.genesis;
+
+import org.openjdk.jmh.runner.Runner;
+import org.openjdk.jmh.runner.RunnerException;
+import org.openjdk.jmh.runner.options.Options;
+import org.openjdk.jmh.runner.options.OptionsBuilder;
+
+/**
+ * Main class that executes a set of HDDS/Ozone benchmarks.
+ * We purposely don't use the runner and tools classes from Hadoop because
+ * of name collisions with the OpenJDK JMH package.
+ * <p>
+ * Hence, these classes do not follow the Tool/Runner pattern of the standard
+ * Hadoop CLI.
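+ * <p>
+ * A typical invocation (the exact classpath is environment-specific) is:
+ * <pre>
+ *   java -cp &lt;hadoop-ozone-tools classpath&gt; org.apache.hadoop.ozone.genesis.Genesis
+ * </pre>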
+ */
+public final class Genesis {
+
+  private Genesis() {
+  }
+
+  public static void main(String[] args) throws RunnerException {
+    Options opt = new OptionsBuilder()
+        .include(BenchMarkContainerStateMap.class.getSimpleName())
+        .include(BenchMarkMetadataStoreReads.class.getSimpleName())
+        .include(BenchMarkMetadataStoreWrites.class.getSimpleName())
+        .include(BenchMarkDatanodeDispatcher.class.getSimpleName())
+        .include(BenchMarkRocksDbStore.class.getSimpleName())
+        .warmupIterations(5)
+        .measurementIterations(20)
+        .shouldDoGC(true)
+        .forks(1)
+        .build();
+
+    new Runner(opt).run();
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java
new file mode 100644
index 0000000..611b62d
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/GenesisUtil.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.genesis;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.StorageUnit;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.Random;
+
+/**
+ * Utility class for benchmark test cases.
+ */
+public final class GenesisUtil {
+
+  private GenesisUtil() {
+    // private constructor.
+  }
+
+  public static final String DEFAULT_TYPE = "default";
+  public static final String CACHE_10MB_TYPE = "Cache10MB";
+  public static final String CACHE_1GB_TYPE = "Cache1GB";
+  public static final String CLOSED_TYPE = "ClosedContainer";
+
+  private static final int DB_FILE_LEN = 7;
+  private static final String TMP_DIR = "java.io.tmpdir";
+
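+  /**
+   * Creates a {@link MetadataStore} under java.io.tmpdir whose cache size
+   * depends on the requested type; DEFAULT and CLOSED use the builder
+   * defaults.
+   */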
+  public static MetadataStore getMetadataStore(String dbType)
+      throws IOException {
+    Configuration conf = new Configuration();
+    MetadataStoreBuilder builder = MetadataStoreBuilder.newBuilder();
+    builder.setConf(conf);
+    builder.setCreateIfMissing(true);
+    builder.setDbFile(
+        Paths.get(System.getProperty(TMP_DIR))
+            .resolve(RandomStringUtils.randomNumeric(DB_FILE_LEN))
+            .toFile());
+    switch (dbType) {
+    case DEFAULT_TYPE:
+      break;
+    case CLOSED_TYPE:
+      break;
+    case CACHE_10MB_TYPE:
+      builder.setCacheSize((long) StorageUnit.MB.toBytes(10));
+      break;
+    case CACHE_1GB_TYPE:
+      builder.setCacheSize((long) StorageUnit.GB.toBytes(1));
+      break;
+    default:
+      throw new IllegalStateException("Unknown type: " + dbType);
+    }
+    return builder.build();
+  }
+
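+  /**
+   * Builds a DatanodeDetails with the given UUID, a random IPv4 address and
+   * all ports set to zero, which is sufficient for benchmarking purposes.
+   */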
+  public static DatanodeDetails createDatanodeDetails(String uuid) {
+    Random random = new Random();
+    String ipAddress =
+        random.nextInt(256) + "." + random.nextInt(256) + "." + random
+            .nextInt(256) + "." + random.nextInt(256);
+
+    DatanodeDetails.Builder builder = DatanodeDetails.newBuilder();
+    builder.setUuid(uuid)
+        .setHostName("localhost")
+        .setIpAddress(ipAddress)
+        .setContainerPort(0)
+        .setRatisPort(0)
+        .setOzoneRestPort(0);
+    return builder.build();
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/package-info.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/package-info.java
new file mode 100644
index 0000000..a7c8ee2
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/package-info.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+/**
+ * Genesis contains a set of benchmarks for Ozone. It is a command line tool
+ * that end users can run to get a sense of the performance the system is
+ * capable of. Since Ozone is a new system, these benchmarks allow us to
+ * establish a baseline against which real-world performance can be measured.
+ */
+package org.apache.hadoop.ozone.genesis;
\ No newline at end of file
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
new file mode 100644
index 0000000..edfbf02
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
@@ -0,0 +1,709 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm.cli;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.OzoneAclInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeInfo;
+import org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeList;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.apache.hadoop.ozone.OzoneConsts.BLOCK_DB;
+import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_DB_SUFFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_DB_NAME;
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_USER_PREFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_BUCKET_PREFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.KSM_VOLUME_PREFIX;
+import static org.apache.hadoop.ozone.OzoneConsts.NODEPOOL_DB;
+import static org.apache.hadoop.ozone.OzoneConsts.OPEN_CONTAINERS_DB;
+
+/**
+ * This is the CLI that can be used to convert an Ozone metadata DB into
+ * an SQLite DB file.
+ *
+ * NOTE: This CLI should be used in an offline fashion. Namely, it should not
+ * be used to convert a DB that is currently in use by Ozone. Instead, it
+ * should be used to debug and diagnose closed DB instances.
+ *
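+ * Example invocation (the paths below are purely illustrative):
+ *   hdfs oz_debug -p /path/to/ksm.db -o /tmp/ksm.sqlite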
+ */
+public class SQLCLI extends Configured implements Tool {
+
+  private Options options;
+  private BasicParser parser;
+  private final Charset encoding = Charset.forName("UTF-8");
+  private final OzoneConfiguration conf;
+
+  // for container.db
+  private static final String CREATE_CONTAINER_INFO =
+      "CREATE TABLE containerInfo (" +
+          "containerName TEXT PRIMARY KEY NOT NULL, " +
+          "leaderUUID TEXT NOT NULL)";
+  private static final String CREATE_CONTAINER_MEMBERS =
+      "CREATE TABLE containerMembers (" +
+          "containerName TEXT NOT NULL, " +
+          "datanodeUUID TEXT NOT NULL," +
+          "PRIMARY KEY(containerName, datanodeUUID));";
+  private static final String CREATE_DATANODE_INFO =
+      "CREATE TABLE datanodeInfo (" +
+          "hostName TEXT NOT NULL, " +
+          "datanodeUUId TEXT PRIMARY KEY NOT NULL," +
+          "ipAddress TEXT, " +
+          "containerPort INTEGER NOT NULL);";
+  private static final String INSERT_CONTAINER_INFO =
+      "INSERT INTO containerInfo (containerName, leaderUUID) " +
+          "VALUES (\"%s\", \"%s\")";
+  private static final String INSERT_DATANODE_INFO =
+      "INSERT INTO datanodeInfo (hostname, datanodeUUid, ipAddress, " +
+          "containerPort) " +
+          "VALUES (\"%s\", \"%s\", \"%s\", \"%d\")";
+  private static final String INSERT_CONTAINER_MEMBERS =
+      "INSERT INTO containerMembers (containerName, datanodeUUID) " +
+          "VALUES (\"%s\", \"%s\")";
+  // for block.db
+  private static final String CREATE_BLOCK_CONTAINER =
+      "CREATE TABLE blockContainer (" +
+          "blockKey TEXT PRIMARY KEY NOT NULL, " +
+          "containerName TEXT NOT NULL)";
+  private static final String INSERT_BLOCK_CONTAINER =
+      "INSERT INTO blockContainer (blockKey, containerName) " +
+          "VALUES (\"%s\", \"%s\")";
+  // for nodepool.db
+  private static final String CREATE_NODE_POOL =
+      "CREATE TABLE nodePool (" +
+          "datanodeUUID TEXT NOT NULL," +
+          "poolName TEXT NOT NULL," +
+          "PRIMARY KEY(datanodeUUID, poolName))";
+  private static final String INSERT_NODE_POOL =
+      "INSERT INTO nodePool (datanodeUUID, poolName) " +
+          "VALUES (\"%s\", \"%s\")";
+  // and reuse CREATE_DATANODE_INFO and INSERT_DATANODE_INFO
+  // for openContainer.db
+  private static final String CREATE_OPEN_CONTAINER =
+      "CREATE TABLE openContainer (" +
+          "containerName TEXT PRIMARY KEY NOT NULL, " +
+          "containerUsed INTEGER NOT NULL)";
+  private static final String INSERT_OPEN_CONTAINER =
+      "INSERT INTO openContainer (containerName, containerUsed) " +
+          "VALUES (\"%s\", \"%s\")";
+
+  // for ksm.db
+  private static final String CREATE_VOLUME_LIST =
+      "CREATE TABLE volumeList (" +
+          "userName TEXT NOT NULL," +
+          "volumeName TEXT NOT NULL," +
+          "PRIMARY KEY (userName, volumeName))";
+  private static final String INSERT_VOLUME_LIST =
+      "INSERT INTO volumeList (userName, volumeName) " +
+          "VALUES (\"%s\", \"%s\")";
+
+  private static final String CREATE_VOLUME_INFO =
+      "CREATE TABLE volumeInfo (" +
+          "adminName TEXT NOT NULL," +
+          "ownerName TEXT NOT NULL," +
+          "volumeName TEXT NOT NULL," +
+          "PRIMARY KEY (adminName, ownerName, volumeName))";
+  private static final String INSERT_VOLUME_INFO =
+      "INSERT INTO volumeInfo (adminName, ownerName, volumeName) " +
+          "VALUES (\"%s\", \"%s\", \"%s\")";
+
+  private static final String CREATE_ACL_INFO =
+      "CREATE TABLE aclInfo (" +
+          "adminName TEXT NOT NULL," +
+          "ownerName TEXT NOT NULL," +
+          "volumeName TEXT NOT NULL," +
+          "type TEXT NOT NULL," +
+          "userName TEXT NOT NULL," +
+          "rights TEXT NOT NULL," +
+          "FOREIGN KEY (adminName, ownerName, volumeName) " +
+          "REFERENCES volumeInfo (adminName, ownerName, volumeName), " +
+          "PRIMARY KEY (adminName, ownerName, volumeName, userName, type))";
+  private static final String INSERT_ACL_INFO =
+      "INSERT INTO aclInfo (adminName, ownerName, volumeName, type, " +
+          "userName, rights) " +
+          "VALUES (\"%s\", \"%s\", \"%s\", \"%s\", \"%s\", \"%s\")";
+
+  private static final String CREATE_BUCKET_INFO =
+      "CREATE TABLE bucketInfo (" +
+          "volumeName TEXT NOT NULL," +
+          "bucketName TEXT NOT NULL," +
+          "versionEnabled BOOLEAN NOT NULL," +
+          "storageType TEXT," +
+          "PRIMARY KEY (volumeName, bucketName))";
+  private static final String INSERT_BUCKET_INFO =
+      "INSERT INTO bucketInfo(volumeName, bucketName, " +
+          "versionEnabled, storageType)" +
+          "VALUES (\"%s\", \"%s\", \"%s\", \"%s\")";
+
+  private static final String CREATE_KEY_INFO =
+      "CREATE TABLE keyInfo (" +
+          "volumeName TEXT NOT NULL," +
+          "bucketName TEXT NOT NULL," +
+          "keyName TEXT NOT NULL," +
+          "dataSize INTEGER," +
+          "blockKey TEXT NOT NULL," +
+          "containerName TEXT NOT NULL," +
+          "PRIMARY KEY (volumeName, bucketName, keyName))";
+  private static final String INSERT_KEY_INFO =
+      "INSERT INTO keyInfo (volumeName, bucketName, keyName, dataSize, " +
+          "blockKey, containerName)" +
+          "VALUES (\"%s\", \"%s\", \"%s\", \"%s\", \"%s\", \"%s\")";
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SQLCLI.class);
+
+  public SQLCLI(OzoneConfiguration conf) {
+    this.options = getOptions();
+    this.parser = new BasicParser();
+    this.conf = conf;
+  }
+
+  @SuppressWarnings("static-access")
+  private Options getOptions() {
+    Options allOptions = new Options();
+    Option helpOpt = OptionBuilder
+        .hasArg(false)
+        .withLongOpt("help")
+        .withDescription("display help message")
+        .create("h");
+    allOptions.addOption(helpOpt);
+
+    Option dbPathOption = OptionBuilder
+        .withArgName("DB path")
+        .withLongOpt("dbPath")
+        .hasArgs(1)
+        .withDescription("specify DB path")
+        .create("p");
+    allOptions.addOption(dbPathOption);
+
+    Option outPathOption = OptionBuilder
+        .withArgName("output path")
+        .withLongOpt("outPath")
+        .hasArgs(1)
+        .withDescription("specify output DB file path")
+        .create("o");
+    allOptions.addOption(outPathOption);
+
+    return allOptions;
+  }
+
+  public void displayHelp() {
+    HelpFormatter helpFormatter = new HelpFormatter();
+    Options allOpts = getOptions();
+    helpFormatter.printHelp("hdfs oz_debug -p <DB path>"
+        + " -o <Output DB file path>", allOpts);
+  }
+
+  @Override
+  public int run(String[] args) throws Exception {
+    CommandLine commandLine = parseArgs(args);
+    if (commandLine.hasOption("help")) {
+      displayHelp();
+      return 0;
+    }
+    if (!commandLine.hasOption("p") || !commandLine.hasOption("o")) {
+      displayHelp();
+      return -1;
+    }
+    String value = commandLine.getOptionValue("p");
+    LOG.info("DB path {}", value);
+    // the value is supposed to be an absolute path to a container file
+    Path dbPath = Paths.get(value);
+    if (!Files.exists(dbPath)) {
+      LOG.error("DB path does not exist: {}", dbPath);
+      return -1;
+    }
+    Path parentPath = dbPath.getParent();
+    Path dbName = dbPath.getFileName();
+    if (parentPath == null || dbName == null) {
+      LOG.error("Error processing db path {}", dbPath);
+      return -1;
+    }
+
+    value = commandLine.getOptionValue("o");
+    Path outPath = Paths.get(value);
+    if (outPath == null || outPath.getParent() == null) {
+      LOG.error("Error processing output path {}", outPath);
+      return -1;
+    }
+
+    if (outPath.toFile().isDirectory()) {
+      LOG.error("The db output path should be a file instead of a directory");
+      return -1;
+    }
+
+    Path outParentPath = outPath.getParent();
+    if (outParentPath != null) {
+      if (!Files.exists(outParentPath)) {
+        Files.createDirectories(outParentPath);
+      }
+    }
+    LOG.info("Parent path [{}] db name [{}]", parentPath, dbName);
+    if (dbName.toString().endsWith(CONTAINER_DB_SUFFIX)) {
+      LOG.info("Converting container DB");
+      convertContainerDB(dbPath, outPath);
+    } else if (dbName.toString().equals(BLOCK_DB)) {
+      LOG.info("Converting block DB");
+      convertBlockDB(dbPath, outPath);
+    } else if (dbName.toString().equals(NODEPOOL_DB)) {
+      LOG.info("Converting node pool DB");
+      convertNodePoolDB(dbPath, outPath);
+    } else if (dbName.toString().equals(OPEN_CONTAINERS_DB)) {
+      LOG.info("Converting open container DB");
+      convertOpenContainerDB(dbPath, outPath);
+    } else if (dbName.toString().equals(KSM_DB_NAME)) {
+      LOG.info("Converting ksm DB");
+      convertKSMDB(dbPath, outPath);
+    } else {
+      LOG.error("Unrecognized db name {}", dbName);
+      return -1;
+    }
+    return 0;
+  }
+
+  private Connection connectDB(String dbPath) throws Exception {
+    Class.forName("org.sqlite.JDBC");
+    String connectPath =
+        String.format("jdbc:sqlite:%s", dbPath);
+    return DriverManager.getConnection(connectPath);
+  }
+
+  private void executeSQL(Connection conn, String sql) throws SQLException {
+    try (Statement stmt = conn.createStatement()) {
+      stmt.executeUpdate(sql);
+    }
+  }
+
+  /**
+   * Converts ksm.db to a SQLite DB file, with the following schema
+   * (* denotes a primary key).
+   *
+   * 1. for key type USER, it contains a user name and a list of volumes:
+   * volumeList
+   * --------------------------------
+   *   userName*     |  volumeName*
+   * --------------------------------
+   *
+   * 2. for key type VOLUME:
+   *
+   * volumeInfo
+   * ----------------------------------------------
+   * adminName | ownerName* | volumeName* | aclID
+   * ----------------------------------------------
+   *
+   * aclInfo
+   * ----------------------------------------------
+   * aclEntryID* | type* | userName* | rights
+   * ----------------------------------------------
+   *
+   * 3. for key type BUCKET
+   * bucketInfo
+   * --------------------------------------------------------
+   * volumeName* | bucketName* | versionEnabled | storageType
+   * --------------------------------------------------------
+   *
+   * TODO: the following table will change when key partitioning is added.
+   * It only has the minimum entries needed for test purposes now.
+   * 4. for key type KEY
+   * -----------------------------------------------
+   * volumeName* | bucketName* | keyName* | dataSize
+   * -----------------------------------------------
+   *
+   *
+   *
+   * @param dbPath path to the ksm.db metadata store.
+   * @param outPath path to the output SQLite file.
+   * @throws Exception if the conversion fails.
+   */
+  private void convertKSMDB(Path dbPath, Path outPath) throws Exception {
+    LOG.info("Create tables for sql ksm db.");
+    File dbFile = dbPath.toFile();
+    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf).setDbFile(dbFile).build();
+         Connection conn = connectDB(outPath.toString())) {
+      executeSQL(conn, CREATE_VOLUME_LIST);
+      executeSQL(conn, CREATE_VOLUME_INFO);
+      executeSQL(conn, CREATE_ACL_INFO);
+      executeSQL(conn, CREATE_BUCKET_INFO);
+      executeSQL(conn, CREATE_KEY_INFO);
+
+      dbStore.iterate(null, (key, value) -> {
+        String keyString = DFSUtilClient.bytes2String(key);
+        KeyType type = getKeyType(keyString);
+        try {
+          insertKSMDB(conn, type, keyString, value);
+        } catch (IOException | SQLException ex) {
+          LOG.error("Exception inserting key {} type {}", keyString, type, ex);
+        }
+        return true;
+      });
+    }
+  }
+
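+  /**
+   * Writes one ksm.db entry into the SQLite tables: USER keys expand into
+   * volumeList rows, VOLUME keys into volumeInfo/aclInfo rows, BUCKET keys
+   * into bucketInfo rows and KEY entries into keyInfo rows.
+   */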
+  private void insertKSMDB(Connection conn, KeyType type, String keyName,
+      byte[] value) throws IOException, SQLException {
+    switch (type) {
+    case USER:
+      VolumeList volumeList = VolumeList.parseFrom(value);
+      for (String volumeName : volumeList.getVolumeNamesList()) {
+        String insertVolumeList =
+            String.format(INSERT_VOLUME_LIST, keyName, volumeName);
+        executeSQL(conn, insertVolumeList);
+      }
+      break;
+    case VOLUME:
+      VolumeInfo volumeInfo = VolumeInfo.parseFrom(value);
+      String adminName = volumeInfo.getAdminName();
+      String ownerName = volumeInfo.getOwnerName();
+      String volumeName = volumeInfo.getVolume();
+      String insertVolumeInfo =
+          String.format(INSERT_VOLUME_INFO, adminName, ownerName, volumeName);
+      executeSQL(conn, insertVolumeInfo);
+      for (OzoneAclInfo aclInfo : volumeInfo.getVolumeAclsList()) {
+        String insertAclInfo =
+            String.format(INSERT_ACL_INFO, adminName, ownerName, volumeName,
+                aclInfo.getType(), aclInfo.getName(), aclInfo.getRights());
+        executeSQL(conn, insertAclInfo);
+      }
+      break;
+    case BUCKET:
+      BucketInfo bucketInfo = BucketInfo.parseFrom(value);
+      String insertBucketInfo =
+          String.format(INSERT_BUCKET_INFO, bucketInfo.getVolumeName(),
+              bucketInfo.getBucketName(), bucketInfo.getIsVersionEnabled(),
+              bucketInfo.getStorageType());
+      executeSQL(conn, insertBucketInfo);
+      break;
+    case KEY:
+      KeyInfo keyInfo = KeyInfo.parseFrom(value);
+      // TODO : the two fields container name and block id are no longer used,
+      // need to revisit this later.
+      String insertKeyInfo =
+          String.format(INSERT_KEY_INFO, keyInfo.getVolumeName(),
+              keyInfo.getBucketName(), keyInfo.getKeyName(),
+              keyInfo.getDataSize(), "EMPTY",
+              "EMPTY");
+      executeSQL(conn, insertKeyInfo);
+      break;
+    default:
+      throw new IOException("Unknown key from ksm.db");
+    }
+  }
+
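+  /**
+   * Infers the type of a ksm.db key from its prefix: user keys carry the
+   * user prefix, volume and bucket keys carry the volume prefix (bucket
+   * keys additionally contain the bucket prefix), and everything else is
+   * treated as a KEY.
+   */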
+  private KeyType getKeyType(String key) {
+    if (key.startsWith(KSM_USER_PREFIX)) {
+      return KeyType.USER;
+    } else if (key.startsWith(KSM_VOLUME_PREFIX)) {
+      return key.replaceFirst(KSM_VOLUME_PREFIX, "")
+          .contains(KSM_BUCKET_PREFIX) ? KeyType.BUCKET : KeyType.VOLUME;
+    } else {
+      return KeyType.KEY;
+    }
+  }
+
+  private enum KeyType {
+    USER,
+    VOLUME,
+    BUCKET,
+    KEY,
+    UNKNOWN
+  }
+
+  /**
+   * Converts container.db to SQLite. The schema of the SQL DB has three
+   * tables: containerInfo, containerMembers and datanodeInfo
+   * (* denotes a primary key).
+   *
+   * containerInfo:
+   * ----------------------------------------------
+   * container name* | container lead datanode uuid
+   * ----------------------------------------------
+   *
+   * containerMembers:
+   * --------------------------------
+   * container name* |  datanodeUUid*
+   * --------------------------------
+   *
+   * datanodeInfo:
+   * ------------------------------------------------------
+   * hostName | datanodeUUid* | ipAddress | containerPort
+   * ------------------------------------------------------
+   *
+   * @param dbPath path to the container DB.
+   * @param outPath path to the output SQLite file.
+   * @throws Exception if the conversion fails.
+   */
+  private void convertContainerDB(Path dbPath, Path outPath)
+      throws Exception {
+    LOG.info("Create tables for sql container db.");
+    File dbFile = dbPath.toFile();
+    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf).setDbFile(dbFile).build();
+        Connection conn = connectDB(outPath.toString())) {
+      executeSQL(conn, CREATE_CONTAINER_INFO);
+      executeSQL(conn, CREATE_CONTAINER_MEMBERS);
+      executeSQL(conn, CREATE_DATANODE_INFO);
+
+      HashSet<String> uuidChecked = new HashSet<>();
+      dbStore.iterate(null, (key, value) -> {
+        String containerName = new String(key, encoding);
+        ContainerInfo containerInfo = ContainerInfo.fromProtobuf(
+            HddsProtos.SCMContainerInfo.PARSER.parseFrom(value));
+        Preconditions.checkNotNull(containerInfo);
+        try {
+          // TODO: include container state in the SQLite schema.
+          insertContainerDB(conn, containerName,
+              containerInfo.getPipeline().getProtobufMessage(), uuidChecked);
+          return true;
+        } catch (SQLException e) {
+          throw new IOException(e);
+        }
+      });
+    }
+  }
+
+  /**
+   * Insert into the sqlite DB of container.db.
+   * @param conn the connection to the sqlite DB.
+   * @param containerName the name of the container.
+   * @param pipeline the actual container pipeline object.
+   * @param uuidChecked the set of datanode UUIDs that have already been
+   *                    inserted.
+   * @throws SQLException throws exception.
+   */
+  private void insertContainerDB(Connection conn, String containerName,
+      Pipeline pipeline, Set<String> uuidChecked) throws SQLException {
+    LOG.info("Insert to sql container db, for container {}", containerName);
+    String insertContainerInfo = String.format(
+        INSERT_CONTAINER_INFO, containerName,
+        pipeline.getPipelineChannel().getLeaderID());
+    executeSQL(conn, insertContainerInfo);
+
+    for (HddsProtos.DatanodeDetailsProto dd :
+        pipeline.getPipelineChannel().getMembersList()) {
+      String uuid = dd.getUuid();
+      if (!uuidChecked.contains(uuid)) {
+        // We could skip this set and catch duplicate-key exceptions instead,
+        // but this seems a bit cleaner.
+        String ipAddr = dd.getIpAddress();
+        String hostName = dd.getHostName();
+        int containerPort = dd.getContainerPort();
+        String insertMachineInfo = String.format(
+            INSERT_DATANODE_INFO, hostName, uuid, ipAddr, containerPort);
+        executeSQL(conn, insertMachineInfo);
+        uuidChecked.add(uuid);
+      }
+      String insertContainerMembers = String.format(
+          INSERT_CONTAINER_MEMBERS, containerName, uuid);
+      executeSQL(conn, insertContainerMembers);
+    }
+    LOG.info("Insertion completed.");
+  }
+
+  /**
+   * Converts block.db to SQLite. This is a rather simple DB; the schema has
+   * only one table:
+   *
+   * blockContainer
+   * --------------------------
+   * blockKey*  | containerName
+   * --------------------------
+   *
+   * @param dbPath path to the block DB.
+   * @param outPath path to the output SQLite file.
+   * @throws Exception if the conversion fails.
+   */
+  private void convertBlockDB(Path dbPath, Path outPath) throws Exception {
+    LOG.info("Create tables for sql block db.");
+    File dbFile = dbPath.toFile();
+    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf).setDbFile(dbFile).build();
+        Connection conn = connectDB(outPath.toString())) {
+      executeSQL(conn, CREATE_BLOCK_CONTAINER);
+
+      dbStore.iterate(null, (key, value) -> {
+        String blockKey = DFSUtilClient.bytes2String(key);
+        String containerName = DFSUtilClient.bytes2String(value);
+        String insertBlockContainer = String.format(
+            INSERT_BLOCK_CONTAINER, blockKey, containerName);
+
+        try {
+          executeSQL(conn, insertBlockContainer);
+          return true;
+        } catch (SQLException e) {
+          throw new IOException(e);
+        }
+      });
+    }
+  }
+
+  /**
+   * Converts nodePool.db to SQLite. The schema of the SQL DB has two tables,
+   * nodePool and datanodeInfo (the same datanodeInfo table as for
+   * container.db).
+   *
+   * nodePool
+   * ---------------------------------------------------------
+   * datanodeUUID* | poolName*
+   * ---------------------------------------------------------
+   *
+   * datanodeInfo:
+   * ------------------------------------------------------
+   * hostName | datanodeUUid* | ipAddress | containerPort
+   * ------------------------------------------------------
+   *
+   * @param dbPath path to the node pool DB.
+   * @param outPath path to the output SQLite file.
+   * @throws Exception if the conversion fails.
+   */
+  private void convertNodePoolDB(Path dbPath, Path outPath) throws Exception {
+    LOG.info("Create table for sql node pool db.");
+    File dbFile = dbPath.toFile();
+    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf).setDbFile(dbFile).build();
+        Connection conn = connectDB(outPath.toString())) {
+      executeSQL(conn, CREATE_NODE_POOL);
+      executeSQL(conn, CREATE_DATANODE_INFO);
+
+      dbStore.iterate(null, (key, value) -> {
+        DatanodeDetails nodeId = DatanodeDetails
+            .getFromProtoBuf(HddsProtos.DatanodeDetailsProto
+                .PARSER.parseFrom(key));
+        String blockPool = DFSUtil.bytes2String(value);
+        try {
+          insertNodePoolDB(conn, blockPool, nodeId);
+          return true;
+        } catch (SQLException e) {
+          throw new IOException(e);
+        }
+      });
+    }
+  }
+
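+  /**
+   * Inserts one (datanode, pool) membership row plus the datanode's
+   * connection details into the SQLite node pool tables.
+   */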
+  private void insertNodePoolDB(Connection conn, String blockPool,
+      DatanodeDetails datanodeDetails) throws SQLException {
+    String insertNodePool = String.format(INSERT_NODE_POOL,
+        datanodeDetails.getUuidString(), blockPool);
+    executeSQL(conn, insertNodePool);
+
+    String insertDatanodeDetails = String
+        .format(INSERT_DATANODE_INFO, datanodeDetails.getHostName(),
+            datanodeDetails.getUuidString(), datanodeDetails.getIpAddress(),
+            datanodeDetails.getContainerPort());
+    executeSQL(conn, insertDatanodeDetails);
+  }
+
+  /**
+   * Converts openContainer.db to a SQLite DB file. This is a rather simple
+   * DB; the schema has only one table:
+   *
+   * openContainer
+   * -------------------------------
+   * containerName* | containerUsed
+   * -------------------------------
+   *
+   * @param dbPath path to the open container DB.
+   * @param outPath path to the output SQLite file.
+   * @throws Exception if the conversion fails.
+   */
+  private void convertOpenContainerDB(Path dbPath, Path outPath)
+      throws Exception {
+    LOG.info("Create table for open container db.");
+    File dbFile = dbPath.toFile();
+    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
+        .setConf(conf).setDbFile(dbFile).build();
+        Connection conn = connectDB(outPath.toString())) {
+      executeSQL(conn, CREATE_OPEN_CONTAINER);
+
+      dbStore.iterate(null, (key, value) -> {
+        String containerName = DFSUtil.bytes2String(key);
+        Long containerUsed =
+            Long.parseLong(DFSUtil.bytes2String(value));
+        String insertOpenContainer = String
+            .format(INSERT_OPEN_CONTAINER, containerName, containerUsed);
+        try {
+          executeSQL(conn, insertOpenContainer);
+          return true;
+        } catch (SQLException e) {
+          throw new IOException(e);
+        }
+      });
+    }
+  }
+
+  private CommandLine parseArgs(String[] argv)
+      throws ParseException {
+    return parser.parse(options, argv);
+  }
+
+  public static void main(String[] args) {
+    Tool shell = new SQLCLI(new OzoneConfiguration());
+    int res = 0;
+    try {
+      res = ToolRunner.run(shell, args);
+    } catch (Exception ex) {
+      LOG.error(ex.toString());
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Command execution failed", ex);
+      }
+      res = 1;
+    }
+    System.exit(res);
+  }
+}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java
new file mode 100644
index 0000000..4c38ae0
--- /dev/null
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm.cli;
+
+/**
+ * Command line helpers for scm management.
+ */
diff --git a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/test/OzoneTestDriver.java b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/test/OzoneTestDriver.java
new file mode 100644
index 0000000..e62ba47
--- /dev/null
+++ b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/test/OzoneTestDriver.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.test;
+
+import org.apache.hadoop.ozone.freon.Freon;
+import org.apache.hadoop.util.ProgramDriver;
+
+/**
+ * Driver for Ozone tests.
+ */
+public class OzoneTestDriver {
+
+  private final ProgramDriver pgd;
+
+  public OzoneTestDriver() {
+    this(new ProgramDriver());
+  }
+
+  public OzoneTestDriver(ProgramDriver pgd) {
+    this.pgd = pgd;
+    try {
+      pgd.addClass("freon", Freon.class,
+          "Populates ozone with data.");
+    } catch (Throwable e) {
+      e.printStackTrace();
+    }
+  }
+
+  public void run(String[] args) {
+    int exitCode = -1;
+    try {
+      exitCode = pgd.run(args);
+    } catch (Throwable e) {
+      e.printStackTrace();
+    }
+
+    System.exit(exitCode);
+  }
+
+  public static void main(String[] args) {
+    new OzoneTestDriver().run(args);
+  }
+}
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 5f5abc4..793ffb4 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -96,6 +96,8 @@
     <apacheds.version>2.0.0-M21</apacheds.version>
     <ldap-api.version>1.0.0-M33</ldap-api.version>
 
+    <!-- Apache Ratis version -->
+    <ratis.version>0.1.1-alpha-8fd74ed-SNAPSHOT</ratis.version>
     <jcache.version>1.0-alpha-1</jcache.version>
     <ehcache.version>3.3.1</ehcache.version>
     <hikari.version>2.4.12</hikari.version>
@@ -558,6 +560,113 @@
 
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-objectstore-service</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-common</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-client</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-tools</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-tools</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-integration-test</artifactId>
+        <version>${hdds.version}</version>
+        <type>test-jar</type>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-server-framework</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-server-scm</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-container-service</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-container-service</artifactId>
+        <version>${hdds.version}</version>
+        <type>test-jar</type>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdds-server-scm</artifactId>
+        <type>test-jar</type>
+        <version>${hdds.version}</version>
+      </dependency>
+
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-common</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-ozone-manager</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-ozone-manager</artifactId>
+        <version>${hdds.version}</version>
+        <type>test-jar</type>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-ozone-client</artifactId>
+        <version>${hdds.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.openjdk.jmh</groupId>
+        <artifactId>jmh-core</artifactId>
+        <version>1.19</version>
+      </dependency>
+      <dependency>
+        <groupId>org.openjdk.jmh</groupId>
+        <artifactId>jmh-generator-annprocess</artifactId>
+        <version>1.19</version>
+      </dependency>
+
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-kms</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
@@ -767,6 +876,43 @@
       </dependency>
 
       <dependency>
+        <groupId>org.jctools</groupId>
+        <artifactId>jctools-core</artifactId>
+        <version>1.2.1</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.ratis</groupId>
+        <artifactId>ratis-proto-shaded</artifactId>
+        <version>${ratis.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>ratis-common</artifactId>
+        <groupId>org.apache.ratis</groupId>
+        <version>${ratis.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>ratis-client</artifactId>
+        <groupId>org.apache.ratis</groupId>
+        <version>${ratis.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>ratis-server</artifactId>
+        <groupId>org.apache.ratis</groupId>
+        <version>${ratis.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>ratis-netty</artifactId>
+        <groupId>org.apache.ratis</groupId>
+        <version>${ratis.version}</version>
+      </dependency>
+      <dependency>
+        <artifactId>ratis-grpc</artifactId>
+        <groupId>org.apache.ratis</groupId>
+        <version>${ratis.version}</version>
+      </dependency>
+
+      <dependency>
         <groupId>io.netty</groupId>
         <artifactId>netty</artifactId>
         <version>3.10.5.Final</version>
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
index eda7183..40df7c5 100644
--- a/hadoop-project/src/site/site.xml
+++ b/hadoop-project/src/site/site.xml
@@ -109,6 +109,15 @@
       <item name="Provided Storage" href="hadoop-project-dist/hadoop-hdfs/HdfsProvidedStorage.html"/>
     </menu>
 
+    <menu name="Ozone" inherit="top">
+      <item name="Getting Started" href="hadoop-project-dist/hadoop-hdfs/OzoneGettingStarted.html"/>
+      <item name="Ozone Overview"
+            href="hadoop-project-dist/hadoop-hdfs/OzoneOverview.html"/>
+      <item name="Commands Reference" href="hadoop-project-dist/hadoop-hdfs/OzoneCommandShell.html"/>
+      <item name="Ozone Rest API" href="hadoop-project-dist/hadoop-hdfs/OzoneRest.html"/>
+      <item name="Ozone Metrics" href="hadoop-project-dist/hadoop-hdfs/OzoneMetrics.html"/>
+    </menu>
+
     <menu name="MapReduce" inherit="top">
       <item name="Tutorial" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html"/>
       <item name="Commands Reference" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredCommands.html"/>
diff --git a/hadoop-tools/hadoop-ozone/pom.xml b/hadoop-tools/hadoop-ozone/pom.xml
new file mode 100644
index 0000000..1cacbb3
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/pom.xml
@@ -0,0 +1,199 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project</artifactId>
+    <version>3.2.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project</relativePath>
+  </parent>
+  <artifactId>hadoop-ozone-filesystem</artifactId>
+  <name>Apache Hadoop Ozone FileSystem</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <file.encoding>UTF-8</file.encoding>
+    <downloadSources>true</downloadSources>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- build a shellprofile -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-optional.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-scm</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-ozone-manager</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-client</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-objectstore-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-client</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-integration-test</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-distcp</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-distcp</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-server-scm</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-client</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdds-container-service</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-ozone-ozone-manager</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/Constants.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/Constants.java
new file mode 100644
index 0000000..992d43a
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/Constants.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+/**
+ * Constants for Ozone FileSystem implementation.
+ */
+public final class Constants {
+
+  public static final String OZONE_URI_SCHEME = "o3";
+
+  public static final String OZONE_DEFAULT_USER = "hdfs";
+
+  public static final String OZONE_HTTP_SCHEME = "http://";
+
+  public static final String OZONE_USER_DIR = "/user";
+
+  /** Local buffer directory. */
+  public static final String BUFFER_DIR_KEY = "fs.ozone.buffer.dir";
+
+  /** Temporary directory. */
+  public static final String BUFFER_TMP_KEY = "hadoop.tmp.dir";
+
+  public static final String OZONE_URI_DELIMITER = "/";
+
+  /** Page size for Ozone listing operation. */
+  public static final int LISTING_PAGE_SIZE = 1024;
+
+  private Constants() {
+
+  }
+}
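
Editor's note (not part of the patch): for orientation, a minimal sketch of how the "o3" scheme defined in Constants is expected to be reached through the generic FileSystem API. The volume and bucket names are hypothetical, and a running Ozone cluster with the matching client configuration is assumed.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class OzoneFsUsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The authority is "bucket.volume", matching the URL pattern parsed by
    // OzoneFileSystem#initialize. Names below are made up for illustration.
    URI uri = URI.create("o3://bucket1.volume1/");
    try (FileSystem fs = FileSystem.get(uri, conf)) {
      System.out.println("scheme: " + fs.getScheme()); // prints "o3"
    }
  }
}
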
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzFs.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzFs.java
new file mode 100644
index 0000000..46dd645
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzFs.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DelegateToFileSystem;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Ozone implementation of AbstractFileSystem.
+ * This implementation delegates to {@link OzoneFileSystem}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class OzFs extends DelegateToFileSystem {
+
+  public OzFs(URI theUri, Configuration conf)
+      throws IOException, URISyntaxException {
+    super(theUri, new OzoneFileSystem(), conf,
+        Constants.OZONE_URI_SCHEME, false);
+  }
+}
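
Editor's note (not part of the patch): OzFs only delegates to OzoneFileSystem, so the interesting part is the wiring into the FileContext/AbstractFileSystem API. A hedged sketch, assuming the usual Hadoop binding key fs.AbstractFileSystem.<scheme>.impl and hypothetical bucket/volume names:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;

public class OzFsBindingSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed binding key, following the standard Hadoop convention for
    // registering AbstractFileSystem implementations per scheme.
    conf.set("fs.AbstractFileSystem.o3.impl",
        "org.apache.hadoop.fs.ozone.OzFs");
    FileContext fc = FileContext.getFileContext(
        URI.create("o3://bucket1.volume1/"), conf);
    System.out.println(fc.getDefaultFileSystem().getUri());
  }
}
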
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java
new file mode 100644
index 0000000..4c5c0c8
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.ozone.client.io.ChunkGroupInputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * The input stream for Ozone file system.
+ *
+ * TODO: Make inputStream generic for both rest and rpc clients
+ * This class is not thread safe.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class OzoneFSInputStream extends FSInputStream {
+
+  private final ChunkGroupInputStream inputStream;
+
+  public OzoneFSInputStream(InputStream inputStream) {
+    this.inputStream = (ChunkGroupInputStream)inputStream;
+  }
+
+  @Override
+  public int read() throws IOException {
+    return inputStream.read();
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    return inputStream.read(b, off, len);
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    inputStream.close();
+  }
+
+  @Override
+  public void seek(long pos) throws IOException {
+    inputStream.seek(pos);
+  }
+
+  @Override
+  public long getPos() throws IOException {
+    return inputStream.getPos();
+  }
+
+  @Override
+  public boolean seekToNewSource(long targetPos) throws IOException {
+    return false;
+  }
+
+  @Override
+  public int available() throws IOException {
+    return inputStream.available();
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java
new file mode 100644
index 0000000..faa3628
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import org.apache.hadoop.ozone.client.io.ChunkGroupOutputStream;
+
+
+/**
+ * The output stream for Ozone file system.
+ *
+ * TODO: Make outputStream generic for both rest and rpc clients
+ * This class is not thread safe.
+ */
+public class OzoneFSOutputStream extends OutputStream {
+
+  private final ChunkGroupOutputStream outputStream;
+
+  public OzoneFSOutputStream(OutputStream outputStream) {
+    this.outputStream = (ChunkGroupOutputStream)outputStream;
+  }
+
+  @Override
+  public void write(int b) throws IOException {
+    outputStream.write(b);
+  }
+
+  @Override
+  public void write(byte[] b, int off, int len) throws IOException {
+    outputStream.write(b, off, len);
+  }
+
+  @Override
+  public synchronized void flush() throws IOException {
+    outputStream.flush();
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    outputStream.close();
+  }
+}
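
Editor's note (not part of the patch): the two stream wrappers above are normally reached through the FSDataInputStream/FSDataOutputStream objects returned by OzoneFileSystem. A small round-trip sketch, assuming a reachable o3:// bucket (path and data are illustrative):

import java.net.URI;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class OzoneStreamRoundTripSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    try (FileSystem fs =
        FileSystem.get(URI.create("o3://bucket1.volume1/"), conf)) {
      Path p = new Path("/demo/key1");
      // create() returns an FSDataOutputStream backed by OzoneFSOutputStream
      try (FSDataOutputStream out = fs.create(p, true)) {
        out.write("hello ozone".getBytes(StandardCharsets.UTF_8));
      }
      // open() returns an FSDataInputStream backed by OzoneFSInputStream
      byte[] buf = new byte[32];
      try (FSDataInputStream in = fs.open(p)) {
        int n = in.read(buf);
        System.out.println(new String(buf, 0, n, StandardCharsets.UTF_8));
      }
    }
  }
}
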
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
new file mode 100644
index 0000000..c2a2fe2
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/OzoneFileSystem.java
@@ -0,0 +1,700 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Iterator;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.fs.CreateFlag;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.client.ObjectStore;
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneClientFactory;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.ozone.client.io.OzoneInputStream;
+import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
+
+import static org.apache.hadoop.fs.ozone.Constants.OZONE_DEFAULT_USER;
+import static org.apache.hadoop.fs.ozone.Constants.OZONE_URI_SCHEME;
+import static org.apache.hadoop.fs.ozone.Constants.OZONE_USER_DIR;
+import static org.apache.hadoop.fs.ozone.Constants.OZONE_URI_DELIMITER;
+import static org.apache.hadoop.fs.ozone.Constants.LISTING_PAGE_SIZE;
+
+/**
+ * The Ozone Filesystem implementation.
+ *
+ * This subclass is marked as private as code should not be creating it
+ * directly; use {@link FileSystem#get(Configuration)} and variants to create
+ * one. If cast to {@link OzoneFileSystem}, extra methods and features may be
+ * accessed. Consider those private and unstable.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class OzoneFileSystem extends FileSystem {
+  static final Logger LOG = LoggerFactory.getLogger(OzoneFileSystem.class);
+
+  /** The Ozone client for connecting to Ozone server. */
+  private OzoneClient ozoneClient;
+  private ObjectStore objectStore;
+  private OzoneVolume volume;
+  private OzoneBucket bucket;
+  private URI uri;
+  private String userName;
+  private Path workingDir;
+  private ReplicationType replicationType;
+  private ReplicationFactor replicationFactor;
+
+  private static final Pattern URL_SCHEMA_PATTERN =
+      Pattern.compile("(.+)\\.([^\\.]+)");
+
+  @Override
+  public void initialize(URI name, Configuration conf) throws IOException {
+    super.initialize(name, conf);
+    setConf(conf);
+    Objects.requireNonNull(name.getScheme(), "No scheme provided in " + name);
+    assert getScheme().equals(name.getScheme());
+
+    String authority = name.getAuthority();
+
+    Matcher matcher = URL_SCHEMA_PATTERN.matcher(authority);
+
+    if (!matcher.matches()) {
+      throw new IllegalArgumentException("Ozone file system url should be "
+          + "in the form o3://bucket.volume");
+    }
+    String bucketStr = matcher.group(1);
+    String volumeStr = matcher.group(2);
+
+    try {
+      uri = new URIBuilder().setScheme(OZONE_URI_SCHEME)
+          .setHost(authority).build();
+      LOG.trace("Ozone URI for ozfs initialization is " + uri);
+      this.ozoneClient = OzoneClientFactory.getRpcClient(conf);
+      objectStore = ozoneClient.getObjectStore();
+      this.volume = objectStore.getVolume(volumeStr);
+      this.bucket = volume.getBucket(bucketStr);
+      this.replicationType = ReplicationType.valueOf(
+          conf.get(OzoneConfigKeys.OZONE_REPLICATION_TYPE,
+              OzoneConfigKeys.OZONE_REPLICATION_TYPE_DEFAULT));
+      this.replicationFactor = ReplicationFactor.valueOf(
+          conf.getInt(OzoneConfigKeys.OZONE_REPLICATION,
+              OzoneConfigKeys.OZONE_REPLICATION_DEFAULT));
+      try {
+        this.userName =
+            UserGroupInformation.getCurrentUser().getShortUserName();
+      } catch (IOException e) {
+        this.userName = OZONE_DEFAULT_USER;
+      }
+      this.workingDir = new Path(OZONE_USER_DIR, this.userName)
+              .makeQualified(this.uri, this.workingDir);
+    } catch (URISyntaxException ue) {
+      final String msg = "Invalid Ozone endpoint " + name;
+      LOG.error(msg, ue);
+      throw new IOException(msg, ue);
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    try {
+      ozoneClient.close();
+    } finally {
+      super.close();
+    }
+  }
+
+  @Override
+  public URI getUri() {
+    return uri;
+  }
+
+  @Override
+  public String getScheme() {
+    return OZONE_URI_SCHEME;
+  }
+
+  @Override
+  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
+    LOG.trace("open() path:{}", f);
+    final FileStatus fileStatus = getFileStatus(f);
+    final String key = pathToKey(f);
+    if (fileStatus.isDirectory()) {
+      throw new FileNotFoundException("Can't open directory " + f + " to read");
+    }
+
+    return new FSDataInputStream(
+        new OzoneFSInputStream(bucket.readKey(key).getInputStream()));
+  }
+
+  @Override
+  public FSDataOutputStream create(Path f, FsPermission permission,
+                                   boolean overwrite, int bufferSize,
+                                   short replication, long blockSize,
+                                   Progressable progress) throws IOException {
+    LOG.trace("create() path:{}", f);
+    final String key = pathToKey(f);
+    final FileStatus status;
+    try {
+      status = getFileStatus(f);
+      if (status.isDirectory()) {
+        throw new FileAlreadyExistsException(f + " is a directory");
+      } else {
+        if (!overwrite) {
+          // path references a file and overwrite is disabled
+          throw new FileAlreadyExistsException(f + " already exists");
+        }
+        LOG.trace("Overwriting file {}", f);
+        deleteObject(key);
+      }
+    } catch (FileNotFoundException ignored) {
+      // check if the parent directory needs to be created
+      Path parent = f.getParent();
+      try {
+        // create all the directories for the parent
+        FileStatus parentStatus = getFileStatus(parent);
+        LOG.trace("parent key:{} status:{}", key, parentStatus);
+      } catch (FileNotFoundException e) {
+        mkdirs(parent);
+      }
+      // This exception needs to be ignored as it means that the file does
+      // not currently exist and a new file can thus be created.
+    }
+
+    OzoneOutputStream ozoneOutputStream =
+        bucket.createKey(key, 0, replicationType, replicationFactor);
+    // We pass null to FSDataOutputStream so it won't count writes that
+    // are being buffered to a file
+    return new FSDataOutputStream(
+        new OzoneFSOutputStream(ozoneOutputStream.getOutputStream()), null);
+  }
+
+  @Override
+  public FSDataOutputStream createNonRecursive(Path path,
+      FsPermission permission,
+      EnumSet<CreateFlag> flags,
+      int bufferSize,
+      short replication,
+      long blockSize,
+      Progressable progress) throws IOException {
+    final Path parent = path.getParent();
+    if (parent != null) {
+      // expect this to raise an exception if there is no parent
+      if (!getFileStatus(parent).isDirectory()) {
+        throw new FileAlreadyExistsException("Not a directory: " + parent);
+      }
+    }
+    return create(path, permission, flags.contains(CreateFlag.OVERWRITE),
+        bufferSize, replication, blockSize, progress);
+  }
+
+  @Override
+  public FSDataOutputStream append(Path f, int bufferSize,
+      Progressable progress) throws IOException {
+    throw new UnsupportedOperationException("append() Not implemented by the "
+        + getClass().getSimpleName() + " FileSystem implementation");
+  }
+
+  private class RenameIterator extends OzoneListingIterator {
+    private final String srcKey;
+    private final String dstKey;
+
+    RenameIterator(Path srcPath, Path dstPath)
+        throws IOException {
+      super(srcPath);
+      srcKey = pathToKey(srcPath);
+      dstKey = pathToKey(dstPath);
+      LOG.trace("rename from:{} to:{}", srcKey, dstKey);
+    }
+
+    boolean processKey(String key) throws IOException {
+      String newKeyName = dstKey.concat(key.substring(srcKey.length()));
+      rename(key, newKeyName);
+      return true;
+    }
+
+    // TODO: currently rename works by copying the streams; with changes in
+    // KSM, this operation can be improved by renaming the keys in KSM directly.
+    private void rename(String src, String dst) throws IOException {
+      try (OzoneInputStream inputStream = bucket.readKey(src);
+          OzoneOutputStream outputStream = bucket
+              .createKey(dst, 0, replicationType, replicationFactor)) {
+        IOUtils.copyBytes(inputStream, outputStream, getConf());
+      }
+    }
+  }
+
+  /**
+   * Check whether the source and destination path are valid and then perform
+   * rename by copying the data from source path to destination path.
+   *
+   * The rename operation is performed by copying data from the source key
+   * to the destination key: each source key is read and its contents are
+   * written to the corresponding destination key, after which the source
+   * path is deleted.
+   * TODO: Optimize the operation by renaming keys in KSM.
+   *
+   * @param src source path for rename
+   * @param dst destination path for rename
+   * @return true if rename operation succeeded or
+   * if the src and dst have the same path and are of the same type
+   * @throws IOException on I/O errors or if the src/dst paths are invalid.
+   */
+  @Override
+  public boolean rename(Path src, Path dst) throws IOException {
+    LOG.trace("rename() from:{} to:{}", src, dst);
+
+    if (src.isRoot()) {
+      // Cannot rename root of file system
+      LOG.trace("Cannot rename the root of a filesystem");
+      return false;
+    }
+
+    // Cannot rename a directory to its own subdirectory
+    Path dstParent = dst.getParent();
+    while (dstParent != null && !src.equals(dstParent)) {
+      dstParent = dstParent.getParent();
+    }
+    Preconditions.checkArgument(dstParent == null,
+        "Cannot rename a directory to its own subdirectory");
+    // Check if the source exists
+    FileStatus srcStatus;
+    try {
+      srcStatus = getFileStatus(src);
+    } catch (FileNotFoundException fnfe) {
+      // source doesn't exist, return
+      return false;
+    }
+
+    // Check if the destination exists
+    FileStatus dstStatus;
+    try {
+      dstStatus = getFileStatus(dst);
+    } catch (FileNotFoundException fnde) {
+      dstStatus = null;
+    }
+
+    if (dstStatus == null) {
+      // If dst doesn't exist, check whether dst parent dir exists or not
+      // if the parent exists, the source can still be renamed to dst path
+      dstStatus = getFileStatus(dst.getParent());
+      if (!dstStatus.isDirectory()) {
+        throw new IOException(String.format(
+            "Failed to rename %s to %s, %s is a file", src, dst,
+            dst.getParent()));
+      }
+    } else {
+      // if dst exists and source and destination are same,
+      // check both the src and dst are of same type
+      if (srcStatus.getPath().equals(dstStatus.getPath())) {
+        return !srcStatus.isDirectory();
+      } else if (dstStatus.isDirectory()) {
+        // If dst is a directory, rename source as a subpath of it.
+        // For example, renaming /source to /dst leads to /dst/source
+        dst = new Path(dst, src.getName());
+        FileStatus[] statuses;
+        try {
+          statuses = listStatus(dst);
+        } catch (FileNotFoundException fnde) {
+          statuses = null;
+        }
+
+        if (statuses != null && statuses.length > 0) {
+          // If the target path already exists as a file or non-empty directory
+          throw new FileAlreadyExistsException(String.format(
+              "Failed to rename %s to %s, file already exists or not empty!",
+              src, dst));
+        }
+      } else {
+        // If dst is not a directory
+        throw new FileAlreadyExistsException(String.format(
+            "Failed to rename %s to %s, file already exists!", src, dst));
+      }
+    }
+
+    if (srcStatus.isDirectory()) {
+      if (dst.toString().startsWith(src.toString())) {
+        LOG.trace("Cannot rename a directory to a subdirectory of self");
+        return false;
+      }
+    }
+    RenameIterator iterator = new RenameIterator(src, dst);
+    iterator.iterate();
+    return src.equals(dst) || delete(src, true);
+  }
+
+  private class DeleteIterator extends OzoneListingIterator {
+    private boolean recursive;
+    DeleteIterator(Path f, boolean recursive)
+        throws IOException {
+      super(f);
+      this.recursive = recursive;
+      if (getStatus().isDirectory()
+          && !this.recursive
+          && listStatus(f).length != 0) {
+        throw new PathIsNotEmptyDirectoryException(f.toString());
+      }
+    }
+
+    boolean processKey(String key) throws IOException {
+      if (key.equals("")) {
+        LOG.trace("Skipping deleting root directory");
+        return true;
+      } else {
+        LOG.trace("deleting key:" + key);
+        boolean succeed = deleteObject(key);
+        // if recursive delete is requested ignore the return value of
+        // deleteObject and issue deletes for other keys.
+        return recursive || succeed;
+      }
+    }
+  }
+
+  @Override
+  public boolean delete(Path f, boolean recursive) throws IOException {
+    LOG.trace("delete() path:{} recursive:{}", f, recursive);
+    try {
+      DeleteIterator iterator = new DeleteIterator(f, recursive);
+      return iterator.iterate();
+    } catch (FileNotFoundException e) {
+      LOG.debug("Couldn't delete {} - does not exist", f);
+      return false;
+    }
+  }
+
+  private class ListStatusIterator extends OzoneListingIterator {
+    private List<FileStatus> statuses = new ArrayList<>(LISTING_PAGE_SIZE);
+    private Path f;
+
+    ListStatusIterator(Path f) throws IOException {
+      super(f);
+      this.f = f;
+    }
+
+    boolean processKey(String key) throws IOException {
+      Path keyPath = new Path(OZONE_URI_DELIMITER + key);
+      if (key.equals(getPathKey())) {
+        if (pathIsDirectory()) {
+          return true;
+        } else {
+          statuses.add(getFileStatus(keyPath));
+          return true;
+        }
+      }
+      // left with only subkeys now
+      if (pathToKey(keyPath.getParent()).equals(pathToKey(f))) {
+        // add only immediate children; keys under subdirectories are skipped
+        statuses.add(getFileStatus(keyPath));
+      }
+      return true;
+    }
+
+    FileStatus[] getStatuses() {
+      return statuses.toArray(new FileStatus[statuses.size()]);
+    }
+  }
+
+  @Override
+  public FileStatus[] listStatus(Path f) throws IOException {
+    LOG.trace("listStatus() path:{}", f);
+    ListStatusIterator iterator = new ListStatusIterator(f);
+    iterator.iterate();
+    return iterator.getStatuses();
+  }
+
+  @Override
+  public void setWorkingDirectory(Path newDir) {
+    workingDir = newDir;
+  }
+
+  @Override
+  public Path getWorkingDirectory() {
+    return workingDir;
+  }
+
+  /**
+   * Check whether the path is valid and then create directories.
+   * Directory is represented using a key with no value.
+   * All the non-existent parent directories are also created.
+   *
+   * @param path directory path to be created
+   * @return true if directory exists or created successfully.
+   * @throws IOException
+   */
+  private boolean mkdir(Path path) throws IOException {
+    Path fPart = path;
+    Path prevfPart = null;
+    do {
+      LOG.trace("validating path:{}", fPart);
+      try {
+        FileStatus fileStatus = getFileStatus(fPart);
+        if (fileStatus.isDirectory()) {
+          // If path exists and a directory, exit
+          break;
+        } else {
+          // Found a file here, rollback and delete newly created directories
+          LOG.trace("Found a file with same name as directory, path:{}", fPart);
+          if (prevfPart != null) {
+            delete(prevfPart, true);
+          }
+          throw new FileAlreadyExistsException(String.format(
+              "Can't make directory for path '%s', it is a file.", fPart));
+        }
+      } catch (FileNotFoundException fnfe) {
+        LOG.trace("creating directory for fpart:{}", fPart);
+        String key = pathToKey(fPart);
+        String dirKey = addTrailingSlashIfNeeded(key);
+        if (!createDirectory(dirKey)) {
+          // Directory creation failed here,
+          // rollback and delete newly created directories
+          LOG.trace("Directory creation failed, path:{}", fPart);
+          if (prevfPart != null) {
+            delete(prevfPart, true);
+          }
+          return false;
+        }
+      }
+      prevfPart = fPart;
+      fPart = fPart.getParent();
+    } while (fPart != null);
+    return true;
+  }
+
+  @Override
+  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
+    LOG.trace("mkdir() path:{} ", f);
+    String key = pathToKey(f);
+    if (StringUtils.isEmpty(key)) {
+      return false;
+    }
+    return mkdir(f);
+  }
+
+  @Override
+  public FileStatus getFileStatus(Path f) throws IOException {
+    LOG.trace("getFileStatus() path:{}", f);
+    Path qualifiedPath = f.makeQualified(uri, workingDir);
+    String key = pathToKey(qualifiedPath);
+
+    if (key.length() == 0) {
+      return new FileStatus(0, true, 1, 0,
+          bucket.getCreationTime(), qualifiedPath);
+    }
+
+    // consider this a file and get key status
+    OzoneKey meta = getKeyInfo(key);
+    if (meta == null) {
+      key = addTrailingSlashIfNeeded(key);
+      meta = getKeyInfo(key);
+    }
+
+    if (meta == null) {
+      LOG.trace("File:{} not found", f);
+      throw new FileNotFoundException(f + ": No such file or directory!");
+    } else if (isDirectory(meta)) {
+      return new FileStatus(0, true, 1, 0,
+          meta.getModificationTime(), qualifiedPath);
+    } else {
+      //TODO: Fetch replication count from ratis config
+      return new FileStatus(meta.getDataSize(), false, 1,
+            getDefaultBlockSize(f), meta.getModificationTime(), qualifiedPath);
+    }
+  }
+
+  /**
+   * Helper method to fetch the key metadata info.
+   * @param key key whose metadata information needs to be fetched
+   * @return metadata info of the key
+   */
+  private OzoneKey getKeyInfo(String key) {
+    try {
+      return bucket.getKey(key);
+    } catch (IOException e) {
+      LOG.trace("Key:{} does not exists", key);
+      return null;
+    }
+  }
+
+  /**
+   * Helper method to check if an Ozone key is representing a directory.
+   * @param key key to be checked as a directory
+   * @return true if key is a directory, false otherwise
+   */
+  private boolean isDirectory(OzoneKey key) {
+    LOG.trace("key name:{} size:{}", key.getName(),
+        key.getDataSize());
+    return key.getName().endsWith(OZONE_URI_DELIMITER)
+        && (key.getDataSize() == 0);
+  }
+
+  /**
+   * Helper method to create a directory specified by key name in bucket.
+   * @param keyName key name to be created as directory
+   * @return true if the key is created, false otherwise
+   */
+  private boolean createDirectory(String keyName) {
+    try {
+      LOG.trace("creating dir for key:{}", keyName);
+      bucket.createKey(keyName, 0, replicationType, replicationFactor).close();
+      return true;
+    } catch (IOException ioe) {
+      LOG.error("create key failed for key:{}", keyName, ioe);
+      return false;
+    }
+  }
+
+  /**
+   * Helper method to delete an object specified by key name in bucket.
+   * @param keyName key name to be deleted
+   * @return true if the key is deleted, false otherwise
+   */
+  private boolean deleteObject(String keyName) {
+    LOG.trace("issuing delete for key" + keyName);
+    try {
+      bucket.deleteKey(keyName);
+      return true;
+    } catch (IOException ioe) {
+      LOG.error("delete key failed " + ioe.getMessage());
+      return false;
+    }
+  }
+
+  /**
+   * Turn a path (relative or otherwise) into an Ozone key.
+   *
+   * @param path the path of the file.
+   * @return the key of the object that represents the file.
+   */
+  public String pathToKey(Path path) {
+    Objects.requireNonNull(path, "Path can not be null!");
+    if (!path.isAbsolute()) {
+      path = new Path(workingDir, path);
+    }
+    // removing leading '/' char
+    String key = path.toUri().getPath().substring(1);
+    LOG.trace("path for key:{} is:{}", key, path);
+    return key;
+  }
+
+  /**
+   * Add the trailing delimiter to the key if it is not already present.
+   *
+   * @param key the Ozone key to append the delimiter to
+   * @return the key with a trailing delimiter
+   */
+  private String addTrailingSlashIfNeeded(String key) {
+    if (StringUtils.isNotEmpty(key) && !key.endsWith(OZONE_URI_DELIMITER)) {
+      return key + OZONE_URI_DELIMITER;
+    } else {
+      return key;
+    }
+  }
+
+  @Override
+  public String toString() {
+    return "OzoneFileSystem{URI=" + uri + ", "
+        + "workingDir=" + workingDir + ", "
+        + "userName=" + userName + ", "
+        + "statistics=" + statistics
+        + "}";
+  }
+
+  private abstract class OzoneListingIterator {
+    private final Path path;
+    private final FileStatus status;
+    private String pathKey;
+    private Iterator<OzoneKey> keyIterator;
+
+    OzoneListingIterator(Path path)
+        throws IOException {
+      this.path = path;
+      this.status = getFileStatus(path);
+      this.pathKey = pathToKey(path);
+      if (status.isDirectory()) {
+        this.pathKey = addTrailingSlashIfNeeded(pathKey);
+      }
+      keyIterator = bucket.listKeys(pathKey);
+    }
+
+    abstract boolean processKey(String key) throws IOException;
+
+    // iterates all the keys in the particular path
+    boolean iterate() throws IOException {
+      LOG.trace("Iterating path {}", path);
+      if (status.isDirectory()) {
+        LOG.trace("Iterating directory:{}", pathKey);
+        while (keyIterator.hasNext()) {
+          OzoneKey key = keyIterator.next();
+          LOG.trace("iterating key:{}", key.getName());
+          if (!processKey(key.getName())) {
+            return false;
+          }
+        }
+        return true;
+      } else {
+        LOG.trace("iterating file:{}", path);
+        return processKey(pathKey);
+      }
+    }
+
+    String getPathKey() {
+      return pathKey;
+    }
+
+    boolean pathIsDirectory() {
+      return status.isDirectory();
+    }
+
+    FileStatus getStatus() {
+      return status;
+    }
+  }
+}
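
Editor's note (not part of the patch): since directories are modelled as zero-byte keys with a trailing delimiter, the path-to-key mapping can be illustrated standalone. The sketch below simply mirrors pathToKey and addTrailingSlashIfNeeded above.

public class KeyMappingSketch {
  // Mirrors OzoneFileSystem#pathToKey for absolute paths: drop the leading '/'
  static String pathToKey(String absolutePath) {
    return absolutePath.substring(1);
  }

  // Mirrors addTrailingSlashIfNeeded: directories are stored as keys that end
  // with the '/' delimiter and carry no data
  static String directoryKey(String key) {
    return key.endsWith("/") ? key : key + "/";
  }

  public static void main(String[] args) {
    System.out.println(pathToKey("/user/hdfs/file1"));          // user/hdfs/file1
    System.out.println(directoryKey(pathToKey("/user/hdfs")));  // user/hdfs/
  }
}
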
diff --git a/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/package-info.java b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/package-info.java
new file mode 100644
index 0000000..93e82c3
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/main/java/org/apache/hadoop/fs/ozone/package-info.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/**
+ * Ozone Filesystem.
+ *
+ * Except for the exceptions, it should all be hidden as implementation details.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.fs.ozone;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java
new file mode 100644
index 0000000..a7a53dc
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFSInputStream.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * Test OzoneFSInputStream by reading through multiple interfaces.
+ */
+public class TestOzoneFSInputStream {
+  private static MiniOzoneCluster cluster = null;
+  private static FileSystem fs;
+  private static StorageHandler storageHandler;
+  private static Path filePath = null;
+  private static byte[] data = null;
+
+  /**
+   * Create a MiniOzoneCluster for testing.
+   * <p>
+   * Ozone is made active by setting OZONE_ENABLED = true and
+   * OZONE_HANDLER_TYPE_KEY = "distributed"
+   *
+   * @throws IOException
+   */
+  @BeforeClass
+  public static void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.setLong(OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE_IN_MB, 10);
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(10)
+        .build();
+    cluster.waitForClusterToBeReady();
+    storageHandler =
+        new ObjectStoreHandler(conf).getStorageHandler();
+
+    // create a volume and a bucket to be used by OzoneFileSystem
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    UserArgs userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+
+    // Fetch the host and port for File System init
+    DatanodeDetails datanodeDetails = cluster.getHddsDatanodes().get(0)
+        .getDatanodeDetails();
+    int port = datanodeDetails.getOzoneRestPort();
+    String host = datanodeDetails.getHostName();
+
+    // Set the fs.defaultFS and start the filesystem
+    String uri = String.format("%s://%s.%s/",
+        Constants.OZONE_URI_SCHEME, bucketName, volumeName);
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, uri);
+    fs = FileSystem.get(conf);
+    int fileLen = 100 * 1024 * 1024;
+    data = DFSUtil.string2Bytes(RandomStringUtils.randomAlphanumeric(fileLen));
+    filePath = new Path("/" + RandomStringUtils.randomAlphanumeric(5));
+    try (FSDataOutputStream stream = fs.create(filePath)) {
+      stream.write(data);
+    }
+  }
+
+  /**
+   * Shutdown the MiniOzoneCluster.
+   */
+  @AfterClass
+  public static void shutdown() throws IOException {
+    fs.close();
+    storageHandler.close();
+    cluster.shutdown();
+  }
+
+  @Test
+  public void testO3FSSingleByteRead() throws IOException {
+    FSDataInputStream inputStream = fs.open(filePath);
+    byte[] value = new byte[data.length];
+    int i = 0;
+    while(true) {
+      int val = inputStream.read();
+      if (val == -1) {
+        break;
+      }
+      value[i] = (byte)val;
+      Assert.assertEquals("value mismatch at:" + i, value[i], data[i]);
+      i++;
+    }
+    Assert.assertEquals(i, data.length);
+    Assert.assertTrue(Arrays.equals(value, data));
+    inputStream.close();
+  }
+
+  @Test
+  public void testO3FSMultiByteRead() throws IOException {
+    FSDataInputStream inputStream = fs.open(filePath);
+    byte[] value = new byte[data.length];
+    byte[] tmp = new byte[1024 * 1024];
+    int offset = 0;
+    while (true) {
+      int val = inputStream.read(tmp);
+      if (val == -1) {
+        break;
+      }
+      System.arraycopy(tmp, 0, value, offset, val);
+      offset += val;
+    }
+    Assert.assertEquals(data.length, offset);
+    Assert.assertTrue(Arrays.equals(value, data));
+    inputStream.close();
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
new file mode 100644
index 0000000..9f94e37
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/TestOzoneFileInterfaces.java
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.junit.After;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Time;
+
+import static org.apache.hadoop.fs.ozone.Constants.OZONE_DEFAULT_USER;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test OzoneFileSystem Interfaces.
+ *
+ * This test will test the various interfaces i.e.
+ * create, read, write, getFileStatus
+ */
+@RunWith(Parameterized.class)
+public class TestOzoneFileInterfaces {
+
+  private String rootPath;
+  private String userName;
+
+  /**
+   * Parameter class to set absolute url/defaultFS handling.
+   * <p>
+   * Hadoop file systems can be used in multiple ways: using the defaultFS
+   * with paths that omit the scheme, or using absolute URLs even with a
+   * different defaultFS. This parameter matrix tests both use cases.
+   */
+  @Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(new Object[][] {{false, true}, {true, false}});
+  }
+
+  private boolean setDefaultFs;
+
+  private boolean useAbsolutePath;
+
+  private static MiniOzoneCluster cluster = null;
+
+  private static FileSystem fs;
+
+  private static StorageHandler storageHandler;
+
+  public TestOzoneFileInterfaces(boolean setDefaultFs,
+      boolean useAbsolutePath) {
+    this.setDefaultFs = setDefaultFs;
+    this.useAbsolutePath = useAbsolutePath;
+  }
+
+  @Before
+  public void init() throws Exception {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    cluster = MiniOzoneCluster.newBuilder(conf)
+        .setNumDatanodes(3)
+        .build();
+    cluster.waitForClusterToBeReady();
+    storageHandler =
+        new ObjectStoreHandler(conf).getStorageHandler();
+
+    // create a volume and a bucket to be used by OzoneFileSystem
+    userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+    UserArgs userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    storageHandler.createVolume(volumeArgs);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    storageHandler.createBucket(bucketArgs);
+
+    rootPath = String
+        .format("%s://%s.%s/", Constants.OZONE_URI_SCHEME, bucketName,
+            volumeName);
+    if (setDefaultFs) {
+      // Set the fs.defaultFS and start the filesystem
+      conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, rootPath);
+      fs = FileSystem.get(conf);
+    } else {
+      fs = FileSystem.get(new URI(rootPath + "/test.txt"), conf);
+    }
+  }
+
+  @After
+  public void teardown() throws IOException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.closeQuietly(fs);
+    IOUtils.closeQuietly(storageHandler);
+  }
+
+  @Test
+  public void testFileSystemInit() throws IOException {
+    if (setDefaultFs) {
+      assertTrue(
+          "The initialized file system is not OzoneFileSystem but " +
+              fs.getClass(),
+          fs instanceof OzoneFileSystem);
+      assertEquals(Constants.OZONE_URI_SCHEME, fs.getUri().getScheme());
+    }
+  }
+
+  @Test
+  public void testOzFsReadWrite() throws IOException {
+    long currentTime = Time.now();
+    int stringLen = 20;
+    String data = RandomStringUtils.randomAlphanumeric(stringLen);
+    String filePath = RandomStringUtils.randomAlphanumeric(5);
+    Path path = createPath("/" + filePath);
+    try (FSDataOutputStream stream = fs.create(path)) {
+      stream.writeBytes(data);
+    }
+
+    FileStatus status = fs.getFileStatus(path);
+    // The timestamp of the newly created file should always be greater than
+    // the time when the test was started
+    assertTrue("Modification time has not been recorded: " + status,
+        status.getModificationTime() > currentTime);
+
+    try (FSDataInputStream inputStream = fs.open(path)) {
+      byte[] buffer = new byte[stringLen];
+      inputStream.readFully(0, buffer);
+      String out = new String(buffer, 0, buffer.length);
+      assertEquals(data, out);
+    }
+  }
+
+
+  @Test
+  public void testDirectory() throws IOException {
+    String dirPath = RandomStringUtils.randomAlphanumeric(5);
+    Path path = createPath("/" + dirPath);
+    assertTrue("Makedirs returned with false for the path " + path,
+        fs.mkdirs(path));
+
+    FileStatus status = fs.getFileStatus(path);
+    assertTrue("The created path is not directory.", status.isDirectory());
+
+    assertEquals(0, status.getLen());
+
+    FileStatus[] statusList = fs.listStatus(createPath("/"));
+    assertEquals(1, statusList.length);
+    assertEquals(status, statusList[0]);
+
+    FileStatus statusRoot = fs.getFileStatus(createPath("/"));
+    assertTrue("Root dir (/) is not a directory.", statusRoot.isDirectory());
+    assertEquals(0, statusRoot.getLen());
+
+
+  }
+
+  @Test
+  public void testPathToKey() throws Exception {
+    OzoneFileSystem ozoneFs = (OzoneFileSystem) TestOzoneFileInterfaces.fs;
+
+    assertEquals("a/b/1", ozoneFs.pathToKey(new Path("/a/b/1")));
+
+    assertEquals("user/" + getCurrentUser() + "/key1/key2",
+        ozoneFs.pathToKey(new Path("key1/key2")));
+
+    assertEquals("key1/key2",
+        ozoneFs.pathToKey(new Path("o3://test1/key1/key2")));
+  }
+
+  private String getCurrentUser() {
+    try {
+      return UserGroupInformation.getCurrentUser().getShortUserName();
+    } catch (IOException e) {
+      return OZONE_DEFAULT_USER;
+    }
+  }
+
+  private Path createPath(String relativePath) {
+    if (useAbsolutePath) {
+      return new Path(
+          rootPath + (relativePath.startsWith("/") ? "" : "/") + relativePath);
+    } else {
+      return new Path(relativePath);
+    }
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractCreate.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractCreate.java
new file mode 100644
index 0000000..dd54315
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractCreate.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests creating files.
+ */
+public class ITestOzoneContractCreate extends AbstractContractCreateTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDelete.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDelete.java
new file mode 100644
index 0000000..f0a3d8d
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDelete.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests covering deletes.
+ */
+public class ITestOzoneContractDelete extends AbstractContractDeleteTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDistCp.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDistCp.java
new file mode 100644
index 0000000..134a9ad
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractDistCp.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.tools.contract.AbstractContractDistCpTest;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Contract test suite covering Ozone integration with DistCp.
+ * Runs the standard DistCp contract tests against a filesystem
+ * backed by a MiniOzoneCluster.
+ */
+public class ITestOzoneContractDistCp extends AbstractContractDistCpTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected OzoneContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractGetFileStatus.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractGetFileStatus.java
new file mode 100644
index 0000000..98bbb14
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractGetFileStatus.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractGetFileStatusTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests covering getFileStatus.
+ */
+public class ITestOzoneContractGetFileStatus
+    extends AbstractContractGetFileStatusTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+
+  @Override
+  public void teardown() throws Exception {
+    getLog().info("FS details {}", getFileSystem());
+    super.teardown();
+  }
+
+  @Override
+  protected Configuration createConfiguration() {
+    return super.createConfiguration();
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractMkdir.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractMkdir.java
new file mode 100644
index 0000000..bc0de5d
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractMkdir.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Test dir operations on Ozone.
+ */
+public class ITestOzoneContractMkdir extends AbstractContractMkdirTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractOpen.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractOpen.java
new file mode 100644
index 0000000..0bc57d4
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractOpen.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests opening files.
+ */
+public class ITestOzoneContractOpen extends AbstractContractOpenTest {
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRename.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRename.java
new file mode 100644
index 0000000..8ce1d1b
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRename.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests covering rename.
+ */
+public class ITestOzoneContractRename extends AbstractContractRenameTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRootDir.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRootDir.java
new file mode 100644
index 0000000..3156eb2
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractRootDir.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract test for ROOT directory operations.
+ */
+public class ITestOzoneContractRootDir extends
+    AbstractContractRootDirectoryTest {
+
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractSeek.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractSeek.java
new file mode 100644
index 0000000..c4bc0ff
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/ITestOzoneContractSeek.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+
+/**
+ * Ozone contract tests covering file seek.
+ */
+public class ITestOzoneContractSeek extends AbstractContractSeekTest {
+  @BeforeClass
+  public static void createCluster() throws IOException {
+    OzoneContract.createCluster();
+  }
+
+  @AfterClass
+  public static void teardownCluster() throws IOException {
+    OzoneContract.destroyCluster();
+  }
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new OzoneContract(conf);
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java
new file mode 100644
index 0000000..3848bc8
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/java/org/apache/hadoop/fs/ozone/contract/OzoneContract.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.ozone.contract;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.ozone.Constants;
+import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.client.rest.OzoneException;
+import org.apache.hadoop.ozone.web.handlers.BucketArgs;
+import org.apache.hadoop.ozone.web.handlers.UserArgs;
+import org.apache.hadoop.ozone.web.handlers.VolumeArgs;
+import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.ozone.ksm.KSMConfigKeys;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.junit.Assert;
+
+import java.io.IOException;
+
+/**
+ * The filesystem contract for Ozone, backed by a MiniOzoneCluster;
+ * a fresh volume and bucket are created for each test filesystem.
+ */
+class OzoneContract extends AbstractFSContract {
+
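+  // A single static cluster is managed by the ITestOzoneContract*
+  // @BeforeClass/@AfterClass hooks (createCluster/destroyCluster below).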
+  private static MiniOzoneCluster cluster;
+  private static StorageHandler storageHandler;
+  private static final String CONTRACT_XML = "contract/ozone.xml";
+
+  OzoneContract(Configuration conf) {
+    super(conf);
+    //insert the base features
+    addConfResource(CONTRACT_XML);
+  }
+
+  @Override
+  public String getScheme() {
+    return Constants.OZONE_URI_SCHEME;
+  }
+
+  @Override
+  public Path getTestPath() {
+    Path path = new Path("/test");
+    return path;
+  }
+
+  public static void createCluster() throws IOException {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.addResource(CONTRACT_XML);
+
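+    // Start a 5-datanode MiniOzoneCluster and block until it is ready to serve requests.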
+    cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(5).build();
+    try {
+      cluster.waitForClusterToBeReady();
+    } catch (Exception e) {
+      throw new IOException(e);
+    }
+    storageHandler = new ObjectStoreHandler(conf).getStorageHandler();
+  }
+
+  private void copyClusterConfigs(String configKey) {
+    getConf().set(configKey, cluster.getConf().get(configKey));
+  }
+
+  @Override
+  public FileSystem getTestFileSystem() throws IOException {
+    // fail fast if createCluster() was never called
+    Assert.assertNotNull("cluster not created", cluster);
+
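+    // Random numeric suffixes keep the volume and bucket names unique per test filesystem.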
+    String userName = "user" + RandomStringUtils.randomNumeric(5);
+    String adminName = "admin" + RandomStringUtils.randomNumeric(5);
+    String volumeName = "volume" + RandomStringUtils.randomNumeric(5);
+    String bucketName = "bucket" + RandomStringUtils.randomNumeric(5);
+
+    UserArgs userArgs = new UserArgs(null, OzoneUtils.getRequestID(),
+        null, null, null, null);
+    VolumeArgs volumeArgs = new VolumeArgs(volumeName, userArgs);
+    volumeArgs.setUserName(userName);
+    volumeArgs.setAdminName(adminName);
+    BucketArgs bucketArgs = new BucketArgs(volumeName, bucketName, userArgs);
+    try {
+      storageHandler.createVolume(volumeArgs);
+      storageHandler.createBucket(bucketArgs);
+    } catch (OzoneException e) {
+      throw new IOException(e);
+    }
+
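+    // Point fs.defaultFS at the freshly created bucket: <scheme>://<bucket>.<volume>/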
+    String uri = String.format("%s://%s.%s/",
+        Constants.OZONE_URI_SCHEME, bucketName, volumeName);
+    getConf().set("fs.defaultFS", uri);
+    copyClusterConfigs(KSMConfigKeys.OZONE_KSM_ADDRESS_KEY);
+    copyClusterConfigs(ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY);
+    return FileSystem.get(getConf());
+  }
+
+  public static void destroyCluster() throws IOException {
+    if (cluster != null) {
+      cluster.shutdown();
+      cluster = null;
+    }
+  }
+}
diff --git a/hadoop-tools/hadoop-ozone/src/test/resources/contract/ozone.xml b/hadoop-tools/hadoop-ozone/src/test/resources/contract/ozone.xml
new file mode 100644
index 0000000..fe2075c
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/resources/contract/ozone.xml
@@ -0,0 +1,113 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~  or more contributor license agreements.  See the NOTICE file
+  ~  distributed with this work for additional information
+  ~  regarding copyright ownership.  The ASF licenses this file
+  ~  to you under the Apache License, Version 2.0 (the
+  ~  "License"); you may not use this file except in compliance
+  ~  with the License.  You may obtain a copy of the License at
+  ~
+  ~       http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~  Unless required by applicable law or agreed to in writing, software
+  ~  distributed under the License is distributed on an "AS IS" BASIS,
+  ~  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~  See the License for the specific language governing permissions and
+  ~  limitations under the License.
+  -->
+
+<configuration>
+  <!--
+  Ozone is a blobstore, with very different behavior than a classic filesystem.
+  -->
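+  <!-- Hence the flags below: no atomic rename/delete, and create visibility may be delayed. -->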
+
+    <property>
+        <name>fs.contract.test.root-tests-enabled</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.test.random-seek-count</name>
+        <value>10</value>
+    </property>
+
+    <property>
+        <name>fs.contract.is-blobstore</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.create-visibility-delayed</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.is-case-sensitive</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.rename-returns-false-if-source-missing</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.rename-remove-dest-if-empty-dir</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-append</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-atomic-directory-delete</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-atomic-rename</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-block-locality</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-concat</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-getfilestatus</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-seek</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-seek-on-closed-file</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.rejects-seek-past-eof</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-strict-exceptions</name>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>fs.contract.supports-unix-permissions</name>
+        <value>false</value>
+    </property>
+</configuration>
diff --git a/hadoop-tools/hadoop-ozone/src/test/resources/log4j.properties b/hadoop-tools/hadoop-ozone/src/test/resources/log4j.properties
new file mode 100644
index 0000000..3bf1619
--- /dev/null
+++ b/hadoop-tools/hadoop-ozone/src/test/resources/log4j.properties
@@ -0,0 +1,23 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=INFO,stdout
+log4j.threshold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) - %m%n
+
+log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
+
+# for debugging low level Ozone operations, uncomment this line
+# log4j.logger.org.apache.hadoop.ozone=DEBUG
diff --git a/hadoop-tools/hadoop-tools-dist/pom.xml b/hadoop-tools/hadoop-tools-dist/pom.xml
index 42ce94c..21cc7cef 100644
--- a/hadoop-tools/hadoop-tools-dist/pom.xml
+++ b/hadoop-tools/hadoop-tools-dist/pom.xml
@@ -192,5 +192,20 @@
         </plugins>
       </build>
     </profile>
+
+    <profile>
+      <id>hdds</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-ozone-filesystem</artifactId>
+          <scope>compile</scope>
+          <version>${project.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
   </profiles>
 </project>
diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml
index 92f585f..f421e58 100644
--- a/hadoop-tools/pom.xml
+++ b/hadoop-tools/pom.xml
@@ -67,5 +67,15 @@
       </plugin>
     </plugins>
   </build>
-
+  <profiles>
+    <profile>
+      <id>hdds</id>
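+      <!-- Disabled by default; run Maven with -Phdds to add the hadoop-ozone module to this build. -->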
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <modules>
+        <module>hadoop-ozone</module>
+      </modules>
+    </profile>
+  </profiles>
 </project>
diff --git a/pom.xml b/pom.xml
index c761493..0e7b23a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -115,6 +115,9 @@
     <dependency-check-maven.version>1.4.3</dependency-check-maven.version>
 
     <shell-executable>bash</shell-executable>
+
+    <!-- version for hdds/ozone components -->
+    <hdds.version>0.2.1-SNAPSHOT</hdds.version>
   </properties>
 
   <modules>
@@ -386,6 +389,8 @@
             <exclude>**/build/**</exclude>
             <exclude>**/patchprocess/**</exclude>
             <exclude>**/*.js</exclude>
+            <exclude>hadoop-hdds/**/</exclude>
+            <exclude>hadoop-ozone/**/</exclude>
          </excludes>
        </configuration>
       </plugin>
@@ -587,6 +592,62 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>hdds-src</id>
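+      <!-- Produces an additional source tarball, hadoop-${project.version}-src-with-hdds, bundling the HDDS/Ozone sources. -->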
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <inherited>false</inherited>
+            <executions>
+              <execution>
+                <id>src-dist</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <finalName>hadoop-${project.version}-src-with-hdds</finalName>
+                  <outputDirectory>hadoop-dist/target</outputDirectory>
+                  <!-- Not using descriptorRef and hadoop-assembly dependency -->
+                  <!-- to avoid making hadoop-main to depend on a module      -->
+                  <descriptors>
+                    <descriptor>hadoop-assemblies/src/main/resources/assemblies/hadoop-src-with-hdds.xml</descriptor>
+                  </descriptors>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <inherited>false</inherited>
+            <executions>
+              <execution>
+                <id>src-dist-msg</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <target>
+                    <echo/>
+                    <echo>Hadoop source tar (including HDDS) available at: ${basedir}/hadoop-dist/target/hadoop-${project.version}-src-with-hdds.tar.gz</echo>
+                    <echo/>
+                  </target>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
 
     <profile>
       <id>sign</id>
@@ -683,5 +744,16 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>hdds</id>
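+      <!-- Disabled by default; -Phdds adds the hadoop-hdds and hadoop-ozone modules (and the Ozone acceptance tests) to the reactor. -->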
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <modules>
+        <module>hadoop-ozone</module>
+        <module>hadoop-hdds</module>
+        <module>hadoop-ozone/acceptance-test</module>
+      </modules>
+    </profile>
   </profiles>
 </project>